add hcl2template pkg
Package hcl2template defines code to parse HCL2 template files correctly, in order to configure a Packer builder, provisioner, communicator, and post-processor. Check out the files in testdata/complete/ to see what a Packer config could look like.
This commit is contained in:
parent
078ba7c8c3
commit
2b0e0d4eab
|
@ -0,0 +1,101 @@
|
|||
package hcl2template
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/hashicorp/hcl/v2"
|
||||
"github.com/zclconf/go-cty/cty"
|
||||
)
|
||||
|
||||
// Artifacts maps artifact references to the artifacts they identify.
type Artifacts map[ArtifactRef]*Artifact

// Artifact represents a parsed artifact: a builder type plus a user-given
// name, together with its not-yet-decoded configuration body.
type Artifact struct {
	// Type is the builder type of the artifact; Name is its user-given name.
	Type string
	Name string

	// DeclRange is the source range of the declaration, for use in
	// diagnostics.
	DeclRange hcl.Range

	// Config is the remaining configuration body; it is kept as an hcl.Body
	// so it can be decoded later (presumably against a builder-specific
	// schema — confirm with the decoding code).
	Config hcl.Body
}
|
||||
|
||||
func (a *Artifact) Ref() ArtifactRef {
|
||||
return ArtifactRef{
|
||||
Type: a.Type,
|
||||
Name: a.Name,
|
||||
}
|
||||
}
|
||||
|
||||
// ArtifactRef identifies an artifact by its builder type and its name.
type ArtifactRef struct {
	Type string
	Name string
}

// NoArtifact is the zero value of ArtifactRef, representing the absence of an
// artifact.
var NoArtifact ArtifactRef
|
||||
|
||||
func artifactRefFromAbsTraversal(t hcl.Traversal) (ArtifactRef, hcl.Diagnostics) {
|
||||
var diags hcl.Diagnostics
|
||||
if len(t) != 3 {
|
||||
diags = append(diags, &hcl.Diagnostic{
|
||||
Severity: hcl.DiagError,
|
||||
Summary: "Invalid artifact reference",
|
||||
Detail: "An artifact reference must have three parts separated by periods: the keyword \"artifact\", the builder type name, and the artifact name.",
|
||||
Subject: t.SourceRange().Ptr(),
|
||||
})
|
||||
return NoArtifact, diags
|
||||
}
|
||||
|
||||
if t.RootName() != "artifact" {
|
||||
diags = append(diags, &hcl.Diagnostic{
|
||||
Severity: hcl.DiagError,
|
||||
Summary: "Invalid artifact reference",
|
||||
Detail: "The first part of an artifact reference must be the keyword \"artifact\".",
|
||||
Subject: t[0].SourceRange().Ptr(),
|
||||
})
|
||||
return NoArtifact, diags
|
||||
}
|
||||
btStep, ok := t[1].(hcl.TraverseAttr)
|
||||
if !ok {
|
||||
diags = append(diags, &hcl.Diagnostic{
|
||||
Severity: hcl.DiagError,
|
||||
Summary: "Invalid artifact reference",
|
||||
Detail: "The second part of an artifact reference must be an identifier giving the builder type of the artifact.",
|
||||
Subject: t[1].SourceRange().Ptr(),
|
||||
})
|
||||
return NoArtifact, diags
|
||||
}
|
||||
nameStep, ok := t[2].(hcl.TraverseAttr)
|
||||
if !ok {
|
||||
diags = append(diags, &hcl.Diagnostic{
|
||||
Severity: hcl.DiagError,
|
||||
Summary: "Invalid artifact reference",
|
||||
Detail: "The third part of an artifact reference must be an identifier giving the name of the artifact.",
|
||||
Subject: t[2].SourceRange().Ptr(),
|
||||
})
|
||||
return NoArtifact, diags
|
||||
}
|
||||
|
||||
return ArtifactRef{
|
||||
Type: btStep.Name,
|
||||
Name: nameStep.Name,
|
||||
}, diags
|
||||
}
|
||||
|
||||
func (r ArtifactRef) String() string {
|
||||
return fmt.Sprintf("%s.%s", r.Type, r.Name)
|
||||
}
|
||||
|
||||
// decodeBodyWithoutSchema is a generic alternative to hcldec.Decode that
|
||||
// just extracts whatever attributes are present and rejects any nested blocks,
|
||||
// for compatibility with legacy builders that can't provide explicit schema.
|
||||
func decodeBodyWithoutSchema(body hcl.Body, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
|
||||
attrs, diags := body.JustAttributes()
|
||||
vals := make(map[string]cty.Value)
|
||||
for name, attr := range attrs {
|
||||
val, moreDiags := attr.Expr.Value(ctx)
|
||||
diags = append(diags, moreDiags...)
|
||||
vals[name] = val
|
||||
}
|
||||
return cty.ObjectVal(vals), diags
|
||||
}
|
|
@ -0,0 +1,9 @@
|
|||
// Package hcl2template defines code to parse hcl2 template files correctly.
//
// It is used to configure a packer builder, provisioner, communicator and
// post-processor.
//
// Check out the files in testdata/complete/ to see what a packer config could
// look like.
package hcl2template
|
|
@ -0,0 +1,333 @@
|
|||
package hcl2template
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
awscommon "github.com/hashicorp/packer/builder/amazon/common"
|
||||
|
||||
"github.com/hashicorp/hcl/v2"
|
||||
"github.com/hashicorp/hcl/v2/hclparse"
|
||||
"github.com/zclconf/go-cty/cty"
|
||||
|
||||
"github.com/hashicorp/packer/helper/communicator"
|
||||
|
||||
amazonebs "github.com/hashicorp/packer/builder/amazon/ebs"
|
||||
"github.com/hashicorp/packer/builder/virtualbox/iso"
|
||||
|
||||
"github.com/hashicorp/packer/provisioner/file"
|
||||
"github.com/hashicorp/packer/provisioner/shell"
|
||||
|
||||
amazon_import "github.com/hashicorp/packer/post-processor/amazon-import"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/google/go-cmp/cmp/cmpopts"
|
||||
)
|
||||
|
||||
// getBasicParser returns a Parser pre-registered with the schemas of a
// representative set of provisioners, post-processors, communicators and
// sources, for use by the tests in this package.
func getBasicParser() *Parser {
	return &Parser{
		Parser: hclparse.NewParser(),
		ProvisionersSchemas: map[string]Decodable{
			"shell": &shell.Config{},
			"file":  &file.Config{},
		},
		PostProvisionersSchemas: map[string]Decodable{
			"amazon-import": &amazon_import.Config{},
		},
		CommunicatorSchemas: map[string]Decodable{
			"ssh":   &communicator.SSH{},
			"winrm": &communicator.WinRM{},
		},
		SourceSchemas: map[string]Decodable{
			"amazon-ebs":     &amazonebs.Config{},
			"virtualbox-iso": &iso.Config{},
		},
	}
}
|
||||
|
||||
// TestParser_ParseFile checks that ParseFile decodes single config files
// into the expected PackerConfig. HCL2Ref/hcl.Range fields and opaque
// hcl.Body/hcl.Expression values are excluded from the comparison.
func TestParser_ParseFile(t *testing.T) {
	defaultParser := getBasicParser()

	type fields struct {
		Parser *hclparse.Parser
	}
	type args struct {
		filename string
		cfg      *PackerConfig
	}
	tests := []struct {
		name             string
		parser           *Parser
		args             args
		wantPackerConfig *PackerConfig
		wantDiags        bool
	}{
		{
			"valid " + sourceLabel + " load",
			defaultParser,
			args{"testdata/sources/basic.pkr.hcl", new(PackerConfig)},
			&PackerConfig{
				Sources: map[SourceRef]*Source{
					SourceRef{
						Type: "virtualbox-iso",
						Name: "ubuntu-1204",
					}: {
						Type: "virtualbox-iso",
						Name: "ubuntu-1204",
						Cfg: &iso.FlatConfig{
							HTTPDir:         strPtr("xxx"),
							ISOChecksum:     strPtr("769474248a3897f4865817446f9a4a53"),
							ISOChecksumType: strPtr("md5"),
							RawSingleISOUrl: strPtr("http://releases.ubuntu.com/12.04/ubuntu-12.04.5-server-amd64.iso"),
							BootCommand:     []string{"..."},
							ShutdownCommand: strPtr("echo 'vagrant' | sudo -S shutdown -P now"),
							RawBootWait:     strPtr("10s"),
							VBoxManage:      [][]string{},
							VBoxManagePost:  [][]string{},
						},
					},
					SourceRef{
						Type: "amazon-ebs",
						Name: "ubuntu-1604",
					}: {
						Type: "amazon-ebs",
						Name: "ubuntu-1604",
						Cfg: &amazonebs.FlatConfig{
							RawRegion:            strPtr("eu-west-3"),
							AMIEncryptBootVolume: boolPtr(true),
							InstanceType:         strPtr("t2.micro"),
							SourceAmiFilter: &awscommon.FlatAmiFilterOptions{
								Filters: map[string]string{
									"name":                "ubuntu/images/*ubuntu-xenial-{16.04}-amd64-server-*",
									"root-device-type":    "ebs",
									"virtualization-type": "hvm",
								},
								Owners: []string{"099720109477"},
							},
							AMIMappings:    []awscommon.FlatBlockDevice{},
							LaunchMappings: []awscommon.FlatBlockDevice{},
						},
					},
					SourceRef{
						Type: "amazon-ebs",
						Name: "that-ubuntu-1.0",
					}: {
						Type: "amazon-ebs",
						Name: "that-ubuntu-1.0",
						Cfg: &amazonebs.FlatConfig{
							RawRegion:            strPtr("eu-west-3"),
							AMIEncryptBootVolume: boolPtr(true),
							InstanceType:         strPtr("t2.micro"),
							SourceAmiFilter: &awscommon.FlatAmiFilterOptions{
								MostRecent: boolPtr(true),
							},
							AMIMappings:    []awscommon.FlatBlockDevice{},
							LaunchMappings: []awscommon.FlatBlockDevice{},
						},
					},
				},
			},
			false,
		},

		{
			"valid " + communicatorLabel + " load",
			defaultParser,
			args{"testdata/communicator/basic.pkr.hcl", new(PackerConfig)},
			&PackerConfig{
				Communicators: map[CommunicatorRef]*Communicator{
					{Type: "ssh", Name: "vagrant"}: {
						Type: "ssh", Name: "vagrant",
						Cfg: &communicator.FlatSSH{
							SSHUsername:               strPtr("vagrant"),
							SSHPassword:               strPtr("s3cr4t"),
							SSHClearAuthorizedKeys:    boolPtr(true),
							SSHHost:                   strPtr("sssssh.hashicorp.io"),
							SSHHandshakeAttempts:      intPtr(32),
							SSHPort:                   intPtr(42),
							SSHFileTransferMethod:     strPtr("scp"),
							SSHPrivateKeyFile:         strPtr("file.pem"),
							SSHPty:                    boolPtr(false),
							SSHTimeout:                strPtr("5m"),
							SSHAgentAuth:              boolPtr(false),
							SSHDisableAgentForwarding: boolPtr(true),
							SSHBastionHost:            strPtr(""),
							SSHBastionPort:            intPtr(0),
							SSHBastionAgentAuth:       boolPtr(true),
							SSHBastionUsername:        strPtr(""),
							SSHBastionPassword:        strPtr(""),
							SSHBastionPrivateKeyFile:  strPtr(""),
							SSHProxyHost:              strPtr("ninja-potatoes.com"),
							SSHProxyPort:              intPtr(42),
							SSHProxyUsername:          strPtr("dark-father"),
							SSHProxyPassword:          strPtr("pickle-rick"),
							SSHKeepAliveInterval:      strPtr("10s"),
							SSHReadWriteTimeout:       strPtr("5m"),
						},
					},
				},
			},
			false,
		},

		// NOTE(review): the cases below are disabled; the duplicate-source
		// case also predates the pointer-field FlatConfig fixtures above.
		// {
		// 	"duplicate " + sourceLabel, defaultParser,
		// 	args{"testdata/sources/basic.pkr.hcl", &PackerConfig{
		// 		Sources: map[SourceRef]*Source{
		// 			SourceRef{
		// 				Type: "amazon-ebs",
		// 				Name: "ubuntu-1604",
		// 			}: {
		// 				Type: "amazon-ebs",
		// 				Name: "ubuntu-1604",
		// 				Cfg:  &amazonebs.FlatConfig{RawRegion: "eu-west-3", InstanceType: "t2.micro"},
		// 			},
		// 		},
		// 	},
		// 	},
		// 	&PackerConfig{
		// 		Sources: map[SourceRef]*Source{
		// 			SourceRef{
		// 				Type: "virtualbox-iso",
		// 				Name: "ubuntu-1204",
		// 			}: {
		// 				Type: "virtualbox-iso",
		// 				Name: "ubuntu-1204",
		// 				Cfg: &iso.FlatConfig{
		// 					HTTPDir:         "xxx",
		// 					ISOChecksum:     "769474248a3897f4865817446f9a4a53",
		// 					ISOChecksumType: "md5",
		// 					RawSingleISOUrl: "http://releases.ubuntu.com/12.04/ubuntu-12.04.5-server-amd64.iso",
		// 					BootCommand:     []string{"..."},
		// 					ShutdownCommand: "echo 'vagrant' | sudo -S shutdown -P now",
		// 					RawBootWait:     "10s",
		// 				},
		// 			},
		// 			SourceRef{
		// 				Type: "amazon-ebs",
		// 				Name: "ubuntu-1604",
		// 			}: {
		// 				Type: "amazon-ebs",
		// 				Name: "ubuntu-1604",
		// 				Cfg:  &amazonebs.FlatConfig{RawRegion: "eu-west-3", InstanceType: "t2.micro"},
		// 			},
		// 			SourceRef{
		// 				Type: "amazon-ebs",
		// 				Name: "that-ubuntu-1.0",
		// 			}: {
		// 				Type: "amazon-ebs",
		// 				Name: "that-ubuntu-1.0",
		// 				Cfg:  &amazonebs.FlatConfig{RawRegion: "eu-west-3", InstanceType: "t2.micro"},
		// 			},
		// 		},
		// 	},
		// 	true,
		// },

		// {"valid variables load", defaultParser,
		// 	args{"testdata/variables/basic.pkr.hcl", new(PackerConfig)},
		// 	&PackerConfig{
		// 		Variables: PackerV1Variables{
		// 			"image_name": "foo-image-{{user `my_secret`}}",
		// 			"key":        "value",
		// 			"my_secret":  "foo",
		// 		},
		// 	},
		// 	false,
		// },

		// {"valid " + buildLabel + " load", defaultParser,
		// 	args{"testdata/build/basic.pkr.hcl", new(PackerConfig)},
		// 	&PackerConfig{
		// 		Builds: Builds{
		// 			{
		// 				Froms: BuildFromList{
		// 					{
		// 						Src: SourceRef{"amazon-ebs", "ubuntu-1604"},
		// 					},
		// 					{
		// 						Src: SourceRef{"virtualbox-iso", "ubuntu-1204"},
		// 					},
		// 				},
		// 				ProvisionerGroups: ProvisionerGroups{
		// 					&ProvisionerGroup{
		// 						CommunicatorRef: CommunicatorRef{"ssh", "vagrant"},
		// 						Provisioners: []Provisioner{
		// 							{Cfg: &shell.FlatConfig{
		// 								Inline: []string{"echo '{{user `my_secret`}}' :D"},
		// 							}},
		// 							{Cfg: &shell.FlatConfig{
		// 								Scripts:        []string{"script-1.sh", "script-2.sh"},
		// 								ValidExitCodes: []int{0, 42},
		// 							}},
		// 							{Cfg: &file.FlatConfig{
		// 								Source:      "app.tar.gz",
		// 								Destination: "/tmp/app.tar.gz",
		// 							}},
		// 						},
		// 					},
		// 				},
		// 				PostProvisionerGroups: ProvisionerGroups{
		// 					&ProvisionerGroup{
		// 						Provisioners: []Provisioner{
		// 							{Cfg: &amazon_import.FlatConfig{
		// 								Name: "that-ubuntu-1.0",
		// 							}},
		// 						},
		// 					},
		// 				},
		// 			},
		// 			&Build{
		// 				Froms: BuildFromList{
		// 					{
		// 						Src: SourceRef{"amazon", "that-ubuntu-1"},
		// 					},
		// 				},
		// 				ProvisionerGroups: ProvisionerGroups{
		// 					&ProvisionerGroup{
		// 						Provisioners: []Provisioner{
		// 							{Cfg: &shell.FlatConfig{
		// 								Inline: []string{"echo HOLY GUACAMOLE !"},
		// 							}},
		// 						},
		// 					},
		// 				},
		// 			},
		// 		},
		// 	},
		// 	false,
		// },
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			p := tt.parser
			f, moreDiags := p.ParseHCLFile(tt.args.filename)
			if moreDiags != nil {
				t.Fatalf("diags: %s", moreDiags)
			}
			diags := p.ParseFile(f, tt.args.cfg)
			// wantDiags == (diags == nil) is true exactly when expectation
			// and outcome disagree (expected diags but got none, or vice
			// versa).
			if tt.wantDiags == (diags == nil) {
				for _, diag := range diags {
					t.Errorf("PackerConfig.Load() unexpected diagnostics. %v", diag)
				}
				t.Error("")
			}
			// Compare everything except source-location bookkeeping and
			// opaque HCL values.
			if diff := cmp.Diff(tt.wantPackerConfig, tt.args.cfg,
				cmpopts.IgnoreUnexported(cty.Value{}),
				cmpopts.IgnoreTypes(HCL2Ref{}),
				cmpopts.IgnoreTypes([]hcl.Range{}),
				cmpopts.IgnoreTypes(hcl.Range{}),
				cmpopts.IgnoreInterfaces(struct{ hcl.Expression }{}),
				cmpopts.IgnoreInterfaces(struct{ hcl.Body }{}),
			); diff != "" {
				t.Errorf("PackerConfig.Load() wrong packer config. %s", diff)
			}
			// Stop at the first failing case; later cases share the parser.
			if t.Failed() {
				t.Fatal()
			}
		})
	}
}
|
||||
|
||||
// strPtr, intPtr and boolPtr return a pointer to their argument; handy for
// building expected flat-config fixtures whose fields are pointers.
func strPtr(s string) *string {
	return &s
}

func intPtr(i int) *int {
	return &i
}

func boolPtr(b bool) *bool {
	return &b
}
|
|
@ -0,0 +1,182 @@
|
|||
package hcl2template
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/hashicorp/hcl/v2"
|
||||
"github.com/hashicorp/hcl/v2/hclparse"
|
||||
)
|
||||
|
||||
// Top-level block type names accepted in a packer HCL2 config file.
const (
	sourceLabel       = "source"
	variablesLabel    = "variables"
	buildLabel        = "build"
	communicatorLabel = "communicator"
)

// configSchema describes the top-level structure of a packer HCL2 config
// file: which block types are allowed and what labels each takes.
var configSchema = &hcl.BodySchema{
	Blocks: []hcl.BlockHeaderSchema{
		{Type: sourceLabel, LabelNames: []string{"type", "name"}},
		{Type: variablesLabel},
		{Type: buildLabel},
		{Type: communicatorLabel, LabelNames: []string{"type", "name"}},
	},
}

// Parser parses packer HCL2/JSON config files. It embeds an hclparse.Parser
// and carries per-component schema registries, each mapping a component type
// name to the Decodable used to decode its configuration block.
type Parser struct {
	*hclparse.Parser

	// ProvisionersSchemas maps provisioner type names to their schemas.
	ProvisionersSchemas map[string]Decodable

	// PostProvisionersSchemas maps post-processor type names to their schemas.
	PostProvisionersSchemas map[string]Decodable

	// CommunicatorSchemas maps communicator type names to their schemas.
	CommunicatorSchemas map[string]Decodable

	// SourceSchemas maps source/builder type names to their schemas.
	SourceSchemas map[string]Decodable
}

// hcl2FileExt is the file extension a packer HCL2 config file must have.
const hcl2FileExt = ".pkr.hcl"
|
||||
|
||||
func (p *Parser) Parse(filename string) (*PackerConfig, hcl.Diagnostics) {
|
||||
var diags hcl.Diagnostics
|
||||
|
||||
hclFiles := []string{}
|
||||
jsonFiles := []string{}
|
||||
if strings.HasSuffix(filename, hcl2FileExt) {
|
||||
hclFiles = append(hclFiles, hcl2FileExt)
|
||||
} else if strings.HasSuffix(filename, ".json") {
|
||||
jsonFiles = append(jsonFiles, hcl2FileExt)
|
||||
} else {
|
||||
fileInfos, err := ioutil.ReadDir(filename)
|
||||
if err != nil {
|
||||
diag := &hcl.Diagnostic{
|
||||
Severity: hcl.DiagError,
|
||||
Summary: "Cannot read hcl directory",
|
||||
Detail: err.Error(),
|
||||
}
|
||||
diags = append(diags, diag)
|
||||
}
|
||||
for _, fileInfo := range fileInfos {
|
||||
if fileInfo.IsDir() {
|
||||
continue
|
||||
}
|
||||
filename := filepath.Join(filename, fileInfo.Name())
|
||||
if strings.HasSuffix(filename, hcl2FileExt) {
|
||||
hclFiles = append(hclFiles, filename)
|
||||
} else if strings.HasSuffix(filename, ".json") {
|
||||
jsonFiles = append(jsonFiles, filename)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var files []*hcl.File
|
||||
for _, filename := range hclFiles {
|
||||
f, moreDiags := p.ParseHCLFile(filename)
|
||||
diags = append(diags, moreDiags...)
|
||||
files = append(files, f)
|
||||
}
|
||||
for _, filename := range jsonFiles {
|
||||
f, moreDiags := p.ParseJSONFile(filename)
|
||||
diags = append(diags, moreDiags...)
|
||||
files = append(files, f)
|
||||
}
|
||||
if diags.HasErrors() {
|
||||
return nil, diags
|
||||
}
|
||||
|
||||
cfg := &PackerConfig{}
|
||||
for _, file := range files {
|
||||
moreDiags := p.ParseFile(file, cfg)
|
||||
diags = append(diags, moreDiags...)
|
||||
}
|
||||
if diags.HasErrors() {
|
||||
return cfg, diags
|
||||
}
|
||||
|
||||
return cfg, nil
|
||||
}
|
||||
|
||||
// ParseFile parses f's content into cfg.
//
// ParseFile may be called multiple times with the same cfg on a different file.
//
// ParseFile returns as complete a config as we can manage, even if there are
// errors, since a partial result can be useful for careful analysis by
// development tools such as text editor extensions.
func (p *Parser) ParseFile(f *hcl.File, cfg *PackerConfig) hcl.Diagnostics {
	var diags hcl.Diagnostics

	// Only the block types declared in configSchema can appear here;
	// anything else is already reported by Content.
	content, moreDiags := f.Body.Content(configSchema)
	diags = append(diags, moreDiags...)

	for _, block := range content.Blocks {
		switch block.Type {
		case sourceLabel:
			if cfg.Sources == nil {
				cfg.Sources = map[SourceRef]*Source{}
			}

			source, moreDiags := p.decodeSource(block, p.SourceSchemas)
			diags = append(diags, moreDiags...)
			// NOTE(review): source is dereferenced below even when
			// decodeSource reported diagnostics — confirm decodeSource never
			// returns a nil *Source on error.

			ref := source.Ref()
			if existing := cfg.Sources[ref]; existing != nil {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Duplicate " + sourceLabel + " block",
					Detail: fmt.Sprintf("This "+sourceLabel+" block has the "+
						"same builder type and name as a previous block declared "+
						"at %s. Each "+sourceLabel+" must have a unique name per builder type.",
						existing.HCL2Ref.DeclRange),
					Subject: &source.HCL2Ref.DeclRange,
				})
				continue
			}
			cfg.Sources[ref] = source

		case variablesLabel:
			if cfg.Variables == nil {
				cfg.Variables = PackerV1Variables{}
			}

			moreDiags := cfg.Variables.decodeConfig(block)
			diags = append(diags, moreDiags...)

		case buildLabel:
			build, moreDiags := p.decodeBuildConfig(block)
			diags = append(diags, moreDiags...)
			cfg.Builds = append(cfg.Builds, build)

		case communicatorLabel:
			if cfg.Communicators == nil {
				cfg.Communicators = map[CommunicatorRef]*Communicator{}
			}
			communicator, moreDiags := p.decodeCommunicatorConfig(block)
			diags = append(diags, moreDiags...)

			ref := communicator.Ref()

			if existing := cfg.Communicators[ref]; existing != nil {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Duplicate " + communicatorLabel + " block",
					Detail: fmt.Sprintf("This "+communicatorLabel+" block has the "+
						"same type and name as a previous block declared "+
						"at %s. Each "+communicatorLabel+" must have a unique name per type.",
						existing.HCL2Ref.DeclRange),
					Subject: &communicator.HCL2Ref.DeclRange,
				})
				continue
			}
			cfg.Communicators[ref] = communicator

		default:
			// Unreachable while configSchema and this switch agree; kept as
			// a guard against them drifting apart.
			panic(fmt.Sprintf("unexpected block type %q", block.Type)) // TODO(azr): err
		}
	}

	return diags
}
|
|
@ -0,0 +1,213 @@
|
|||
package hcl2template
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/google/go-cmp/cmp/cmpopts"
|
||||
|
||||
"github.com/hashicorp/hcl/v2"
|
||||
"github.com/zclconf/go-cty/cty"
|
||||
|
||||
awscommon "github.com/hashicorp/packer/builder/amazon/common"
|
||||
amazonebs "github.com/hashicorp/packer/builder/amazon/ebs"
|
||||
"github.com/hashicorp/packer/builder/virtualbox/iso"
|
||||
|
||||
"github.com/hashicorp/packer/helper/communicator"
|
||||
|
||||
amazon_import "github.com/hashicorp/packer/post-processor/amazon-import"
|
||||
|
||||
"github.com/hashicorp/packer/provisioner/file"
|
||||
"github.com/hashicorp/packer/provisioner/shell"
|
||||
)
|
||||
|
||||
// TestParser_Parse checks the whole-directory entry point against the
// testdata/complete fixture directory (whose files are symlinks to the
// per-feature fixtures), expecting one fully populated PackerConfig.
func TestParser_Parse(t *testing.T) {
	defaultParser := getBasicParser()

	type args struct {
		filename string
	}
	tests := []struct {
		name      string
		parser    *Parser
		args      args
		wantCfg   *PackerConfig
		wantDiags bool
	}{
		{"complete",
			defaultParser,
			args{"testdata/complete"},
			&PackerConfig{
				Sources: map[SourceRef]*Source{
					SourceRef{
						Type: "virtualbox-iso",
						Name: "ubuntu-1204",
					}: {
						Type: "virtualbox-iso",
						Name: "ubuntu-1204",
						Cfg: &iso.FlatConfig{
							HTTPDir:         strPtr("xxx"),
							ISOChecksum:     strPtr("769474248a3897f4865817446f9a4a53"),
							ISOChecksumType: strPtr("md5"),
							RawSingleISOUrl: strPtr("http://releases.ubuntu.com/12.04/ubuntu-12.04.5-server-amd64.iso"),
							BootCommand:     []string{"..."},
							ShutdownCommand: strPtr("echo 'vagrant' | sudo -S shutdown -P now"),
							RawBootWait:     strPtr("10s"),
							VBoxManage:      [][]string{},
							VBoxManagePost:  [][]string{},
						},
					},
					SourceRef{
						Type: "amazon-ebs",
						Name: "ubuntu-1604",
					}: {
						Type: "amazon-ebs",
						Name: "ubuntu-1604",
						Cfg: &amazonebs.FlatConfig{
							RawRegion:            strPtr("eu-west-3"),
							AMIEncryptBootVolume: boolPtr(true),
							InstanceType:         strPtr("t2.micro"),
							SourceAmiFilter: &awscommon.FlatAmiFilterOptions{
								Filters: map[string]string{
									"name":                "ubuntu/images/*ubuntu-xenial-{16.04}-amd64-server-*",
									"root-device-type":    "ebs",
									"virtualization-type": "hvm",
								},
								Owners: []string{"099720109477"},
							},
							AMIMappings:    []awscommon.FlatBlockDevice{},
							LaunchMappings: []awscommon.FlatBlockDevice{},
						},
					},
					SourceRef{
						Type: "amazon-ebs",
						Name: "that-ubuntu-1.0",
					}: {
						Type: "amazon-ebs",
						Name: "that-ubuntu-1.0",
						Cfg: &amazonebs.FlatConfig{
							RawRegion:            strPtr("eu-west-3"),
							AMIEncryptBootVolume: boolPtr(true),
							InstanceType:         strPtr("t2.micro"),
							SourceAmiFilter: &awscommon.FlatAmiFilterOptions{
								MostRecent: boolPtr(true),
							},
							AMIMappings:    []awscommon.FlatBlockDevice{},
							LaunchMappings: []awscommon.FlatBlockDevice{},
						},
					},
				},
				Communicators: map[CommunicatorRef]*Communicator{
					{Type: "ssh", Name: "vagrant"}: {
						Type: "ssh", Name: "vagrant",
						Cfg: &communicator.FlatSSH{
							SSHUsername:               strPtr("vagrant"),
							SSHPassword:               strPtr("s3cr4t"),
							SSHClearAuthorizedKeys:    boolPtr(true),
							SSHHost:                   strPtr("sssssh.hashicorp.io"),
							SSHHandshakeAttempts:      intPtr(32),
							SSHPort:                   intPtr(42),
							SSHFileTransferMethod:     strPtr("scp"),
							SSHPrivateKeyFile:         strPtr("file.pem"),
							SSHPty:                    boolPtr(false),
							SSHTimeout:                strPtr("5m"),
							SSHAgentAuth:              boolPtr(false),
							SSHDisableAgentForwarding: boolPtr(true),
							SSHBastionHost:            strPtr(""),
							SSHBastionPort:            intPtr(0),
							SSHBastionAgentAuth:       boolPtr(true),
							SSHBastionUsername:        strPtr(""),
							SSHBastionPassword:        strPtr(""),
							SSHBastionPrivateKeyFile:  strPtr(""),
							SSHProxyHost:              strPtr("ninja-potatoes.com"),
							SSHProxyPort:              intPtr(42),
							SSHProxyUsername:          strPtr("dark-father"),
							SSHProxyPassword:          strPtr("pickle-rick"),
							SSHKeepAliveInterval:      strPtr("10s"),
							SSHReadWriteTimeout:       strPtr("5m"),
						},
					},
				},
				Variables: PackerV1Variables{
					"image_name": "foo-image-{{user `my_secret`}}",
					"key":        "value",
					"my_secret":  "foo",
				},
				Builds: Builds{
					{
						Froms: BuildFromList{
							{
								Src: SourceRef{"amazon-ebs", "ubuntu-1604"},
							},
							{
								Src: SourceRef{"virtualbox-iso", "ubuntu-1204"},
							},
						},
						ProvisionerGroups: ProvisionerGroups{
							&ProvisionerGroup{
								CommunicatorRef: CommunicatorRef{"ssh", "vagrant"},
								Provisioners: []Provisioner{
									{Cfg: &shell.FlatConfig{
										Inline: []string{"echo '{{user `my_secret`}}' :D"},
									}},
									{Cfg: &shell.FlatConfig{
										Scripts:        []string{"script-1.sh", "script-2.sh"},
										ValidExitCodes: []int{0, 42},
									}},
									{Cfg: &file.FlatConfig{
										Source:      strPtr("app.tar.gz"),
										Destination: strPtr("/tmp/app.tar.gz"),
									}},
								},
							},
						},
						PostProvisionerGroups: ProvisionerGroups{
							&ProvisionerGroup{
								Provisioners: []Provisioner{
									{Cfg: &amazon_import.FlatConfig{
										Name: strPtr("that-ubuntu-1.0"),
									}},
								},
							},
						},
					},
					&Build{
						Froms: BuildFromList{
							{
								// NOTE(review): "that-ubuntu-1.0" loses its
								// ".0" here — see sourceRefFromString.
								Src: SourceRef{"amazon", "that-ubuntu-1"},
							},
						},
						ProvisionerGroups: ProvisionerGroups{
							&ProvisionerGroup{
								Provisioners: []Provisioner{
									{Cfg: &shell.FlatConfig{
										Inline: []string{"echo HOLY GUACAMOLE !"},
									}},
								},
							},
						},
					},
				},
			}, false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			gotCfg, gotDiags := tt.parser.Parse(tt.args.filename)
			// True exactly when expectation and outcome disagree.
			if tt.wantDiags == (gotDiags == nil) {
				t.Errorf("Parser.Parse() unexpected diagnostics. %s", gotDiags)
			}
			// Ignore source-location bookkeeping and opaque HCL values.
			if diff := cmp.Diff(tt.wantCfg, gotCfg,
				cmpopts.IgnoreUnexported(cty.Value{}),
				cmpopts.IgnoreTypes(HCL2Ref{}),
				cmpopts.IgnoreTypes([]hcl.Range{}),
				cmpopts.IgnoreTypes(hcl.Range{}),
				cmpopts.IgnoreInterfaces(struct{ hcl.Expression }{}),
				cmpopts.IgnoreInterfaces(struct{ hcl.Body }{}),
			); diff != "" {
				t.Errorf("Parser.Parse() wrong packer config. %s", diff)
			}

		})
	}
}
|
|
@ -0,0 +1,66 @@
|
|||
|
||||
// Test fixture: two build blocks exercised by the build-decoding tests.
// starts resources to provision them.
build {
    from "src.amazon-ebs.ubuntu-1604" {
        ami_name = "that-ubuntu-1.0"
    }

    from "src.virtualbox-iso.ubuntu-1204" {
        // build name is defaulted from the label "src.virtualbox-iso.ubuntu-1204"
        // NOTE(review): "outout_dir" looks like a typo for "output_dir";
        // kept as-is because tests parse this file as-is — confirm intent.
        outout_dir = "path/"
    }

    provision {
        communicator = "comm.ssh.vagrant"

        shell {
            inline = [
                "echo '{{user `my_secret`}}' :D"
            ]
        }

        shell {
            valid_exit_codes = [
                0,
                42,
            ]
            scripts = [
                "script-1.sh",
                "script-2.sh",
            ]
            // override "vmware-iso" { // TODO(azr): handle common fields
            //   execute_command = "echo 'password' | sudo -S bash {{.Path}}"
            // }
        }

        file {
            source = "app.tar.gz"
            destination = "/tmp/app.tar.gz"
            // timeout = "5s" // TODO(azr): handle common fields
        }

    }

    post_provision {
        amazon-import {
            // only = ["src.virtualbox-iso.ubuntu-1204"] // TODO(azr): handle common fields
            ami_name = "that-ubuntu-1.0"
        }
    }
}

build {
    // build an ami using the ami from the previous build block.
    from "src.amazon.that-ubuntu-1.0" {
        ami_name = "fooooobaaaar"
    }

    provision {

        shell {
            inline = [
                "echo HOLY GUACAMOLE !"
            ]
        }
    }
}
|
|
@ -0,0 +1,27 @@
|
|||
|
||||
// Test fixture: an ssh communicator named "vagrant"; the parser tests
// expect exactly these values decoded into communicator.FlatSSH.
communicator "ssh" "vagrant" {
    ssh_password = "s3cr4t"
    ssh_username = "vagrant"
    ssh_agent_auth = false
    ssh_bastion_agent_auth = true
    ssh_bastion_host = ""
    ssh_bastion_password = ""
    ssh_bastion_port = 0
    ssh_bastion_private_key_file = ""
    ssh_bastion_username = ""
    ssh_clear_authorized_keys = true
    ssh_disable_agent_forwarding = true
    ssh_file_transfer_method = "scp"
    ssh_handshake_attempts = 32
    ssh_host = "sssssh.hashicorp.io"
    ssh_port = 42
    ssh_keep_alive_interval = "10s"
    ssh_private_key_file = "file.pem"
    ssh_proxy_host = "ninja-potatoes.com"
    ssh_proxy_password = "pickle-rick"
    // NOTE(review): quoted on purpose? the tests expect intPtr(42), which
    // presumably relies on string-to-number conversion — confirm.
    ssh_proxy_port = "42"
    ssh_proxy_username = "dark-father"
    ssh_pty = false
    ssh_read_write_timeout = "5m"
    ssh_timeout = "5m"
}
|
|
@ -0,0 +1 @@
|
|||
../build/basic.pkr.hcl
|
|
@ -0,0 +1 @@
|
|||
../communicator/basic.pkr.hcl
|
|
@ -0,0 +1 @@
|
|||
../sources/basic.pkr.hcl
|
|
@ -0,0 +1,61 @@
|
|||
|
||||
// Test fixture (testdata/complete variant): exercises unquoted communicator
// references, override blocks, and an upload block.
// starts resources to provision them.
build {
    from "src.amazon-ebs.ubuntu-1604" {
        ami_name = "that-ubuntu-1.0"
    }

    from "src.virtualbox-iso.ubuntu-1204" {
        // build name is defaulted from the label "src.virtualbox-iso.ubuntu-1204"
        // NOTE(review): "outout_dir" looks like a typo for "output_dir" —
        // kept as-is; tests parse this file as-is.
        outout_dir = "path/"
    }

    provision {
        communicator = comm.ssh.vagrant

        shell {
            inline = [
                "echo '{{user `my_secret`}}' :D"
            ]
        }

        shell {
            script = [
                "script-1.sh",
                "script-2.sh",
            ]
            override "vmware-iso" {
                execute_command = "echo 'password' | sudo -S bash {{.Path}}"
            }
        }

        upload "log.go" "/tmp" {
            timeout = "5s"
        }

    }

    post_provision {
        amazon-import {
            only = ["src.virtualbox-iso.ubuntu-1204"]
            ami_name = "that-ubuntu-1.0"
        }
    }
}

build {
    // build an ami using the ami from the previous build block.
    from "src.amazon.that-ubuntu-1.0" {
        ami_name = "fooooobaaaar"
    }

    provision {
        communicator = comm.ssh.vagrant

        shell {
            inline = [
                "echo HOLY GUACAMOLE !"
            ]
        }
    }
}
|
|
@ -0,0 +1 @@
|
|||
../variables/basic.pkr.hcl
|
|
@ -0,0 +1,37 @@
|
|||
// Test fixture: three source blocks; the parser tests expect exactly these
// values decoded into iso.FlatConfig / amazonebs.FlatConfig.
// a source represents a reusable setting for a system boot/start.
source "virtualbox-iso" "ubuntu-1204" {
    iso_url = "http://releases.ubuntu.com/12.04/ubuntu-12.04.5-server-amd64.iso"
    iso_checksum = "769474248a3897f4865817446f9a4a53"
    iso_checksum_type = "md5"

    boot_wait = "10s"
    http_directory = "xxx"
    boot_command = ["..."]

    shutdown_command = "echo 'vagrant' | sudo -S shutdown -P now"
}

source "amazon-ebs" "ubuntu-1604" {
    instance_type = "t2.micro"
    encrypt_boot = true
    region = "eu-west-3"
    source_ami_filter {
        filters {
            virtualization-type = "hvm"
            name = "ubuntu/images/*ubuntu-xenial-{16.04}-amd64-server-*"
            root-device-type = "ebs"
        }
        owners = [
            "099720109477"
        ]
    }
}

// note the period in the name: exercises non-identifier source names.
source "amazon-ebs" "that-ubuntu-1.0" {
    instance_type = "t2.micro"
    encrypt_boot = true
    region = "eu-west-3"
    source_ami_filter {
        most_recent = true
    }
}
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
// Test fixture: packer v1-style variables, decoded into PackerV1Variables
// as plain key/value strings.
variables {
    key = "value"
    my_secret = "foo"
    image_name = "foo-image-{{user `my_secret`}}"
}
|
|
@ -0,0 +1,68 @@
|
|||
package hcl2template
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/hashicorp/hcl/v2/gohcl"
|
||||
"github.com/hashicorp/hcl/v2"
|
||||
"github.com/hashicorp/hcl/v2/hclsyntax"
|
||||
)
|
||||
|
||||
type BuildFromList []BuildFrom
|
||||
|
||||
type BuildFrom struct {
|
||||
// source to take config from
|
||||
Src SourceRef `hcl:"-"`
|
||||
|
||||
HCL2Ref HCL2Ref
|
||||
}
|
||||
|
||||
func sourceRefFromString(in string) SourceRef {
|
||||
args := strings.Split(in, ".")
|
||||
if len(args) < 2 {
|
||||
return NoSource
|
||||
}
|
||||
if len(args) > 2 {
|
||||
// src.type.name
|
||||
args = args[1:]
|
||||
}
|
||||
return SourceRef{
|
||||
Type: args[0],
|
||||
Name: args[1],
|
||||
}
|
||||
}
|
||||
|
||||
func (bf *BuildFrom) decodeConfig(block *hcl.Block) hcl.Diagnostics {
|
||||
|
||||
bf.Src = sourceRefFromString(block.Labels[0])
|
||||
bf.HCL2Ref.DeclRange = block.DefRange
|
||||
|
||||
var b struct {
|
||||
Config hcl.Body `hcl:",remain"`
|
||||
}
|
||||
diags := gohcl.DecodeBody(block.Body, nil, &b)
|
||||
|
||||
if bf.Src == NoSource {
|
||||
diags = append(diags, &hcl.Diagnostic{
|
||||
Severity: hcl.DiagError,
|
||||
Summary: "Invalid " + sourceLabel + " reference",
|
||||
Detail: "A " + sourceLabel + " type must start with a letter and " +
|
||||
"may contain only letters, digits, underscores, and dashes." +
|
||||
"A valid source reference looks like: `src.type.name`",
|
||||
Subject: &block.LabelRanges[0],
|
||||
})
|
||||
}
|
||||
if !hclsyntax.ValidIdentifier(bf.Src.Type) ||
|
||||
!hclsyntax.ValidIdentifier(bf.Src.Name) {
|
||||
diags = append(diags, &hcl.Diagnostic{
|
||||
Severity: hcl.DiagError,
|
||||
Summary: "Invalid " + sourceLabel + " reference",
|
||||
Detail: "A " + sourceLabel + " type must start with a letter and " +
|
||||
"may contain only letters, digits, underscores, and dashes." +
|
||||
"A valid source reference looks like: `src.type.name`",
|
||||
Subject: &block.LabelRanges[0],
|
||||
})
|
||||
}
|
||||
|
||||
return diags
|
||||
}
|
|
@ -0,0 +1,61 @@
|
|||
package hcl2template
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/hcl/v2"
|
||||
)
|
||||
|
||||
const (
|
||||
buildFromLabel = "from"
|
||||
|
||||
buildProvisionnersLabel = "provision"
|
||||
|
||||
buildPostProvisionnersLabel = "post_provision"
|
||||
)
|
||||
|
||||
var buildSchema = &hcl.BodySchema{
|
||||
Blocks: []hcl.BlockHeaderSchema{
|
||||
{Type: buildFromLabel, LabelNames: []string{"src"}},
|
||||
{Type: buildProvisionnersLabel},
|
||||
{Type: buildPostProvisionnersLabel},
|
||||
},
|
||||
}
|
||||
|
||||
type Build struct {
|
||||
// Ordered list of provisioner groups
|
||||
ProvisionerGroups ProvisionerGroups
|
||||
|
||||
// Ordered list of post-provisioner groups
|
||||
PostProvisionerGroups ProvisionerGroups
|
||||
|
||||
// Ordered list of output stanzas
|
||||
Froms BuildFromList
|
||||
|
||||
HCL2Ref HCL2Ref
|
||||
}
|
||||
|
||||
type Builds []*Build
|
||||
|
||||
func (p *Parser) decodeBuildConfig(block *hcl.Block) (*Build, hcl.Diagnostics) {
|
||||
build := &Build{}
|
||||
|
||||
content, diags := block.Body.Content(buildSchema)
|
||||
for _, block := range content.Blocks {
|
||||
switch block.Type {
|
||||
case buildFromLabel:
|
||||
bf := BuildFrom{}
|
||||
moreDiags := bf.decodeConfig(block)
|
||||
diags = append(diags, moreDiags...)
|
||||
build.Froms = append(build.Froms, bf)
|
||||
case buildProvisionnersLabel:
|
||||
pg, moreDiags := p.decodeProvisionerGroup(block, p.ProvisionersSchemas)
|
||||
diags = append(diags, moreDiags...)
|
||||
build.ProvisionerGroups = append(build.ProvisionerGroups, pg)
|
||||
case buildPostProvisionnersLabel:
|
||||
pg, moreDiags := p.decodeProvisionerGroup(block, p.PostProvisionersSchemas)
|
||||
diags = append(diags, moreDiags...)
|
||||
build.PostProvisionerGroups = append(build.PostProvisionerGroups, pg)
|
||||
}
|
||||
}
|
||||
|
||||
return build, diags
|
||||
}
|
|
@ -0,0 +1,70 @@
|
|||
package hcl2template
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/hcl/v2"
|
||||
"github.com/hashicorp/hcl/v2/gohcl"
|
||||
)
|
||||
|
||||
// Provisioner represents a parsed provisioner
|
||||
type Provisioner struct {
|
||||
// Cfg is a parsed config
|
||||
Cfg interface{}
|
||||
}
|
||||
|
||||
type ProvisionerGroup struct {
|
||||
CommunicatorRef CommunicatorRef
|
||||
|
||||
Provisioners []Provisioner
|
||||
HCL2Ref HCL2Ref
|
||||
}
|
||||
|
||||
// ProvisionerGroups is a slice of provision blocks; which contains
|
||||
// provisioners
|
||||
type ProvisionerGroups []*ProvisionerGroup
|
||||
|
||||
func (p *Parser) decodeProvisionerGroup(block *hcl.Block, provisionerSpecs map[string]Decodable) (*ProvisionerGroup, hcl.Diagnostics) {
|
||||
var b struct {
|
||||
Communicator string `hcl:"communicator,optional"`
|
||||
Remain hcl.Body `hcl:",remain"`
|
||||
}
|
||||
|
||||
diags := gohcl.DecodeBody(block.Body, nil, &b)
|
||||
|
||||
pg := &ProvisionerGroup{}
|
||||
pg.CommunicatorRef = communicatorRefFromString(b.Communicator)
|
||||
pg.HCL2Ref.DeclRange = block.DefRange
|
||||
|
||||
buildSchema := &hcl.BodySchema{
|
||||
Blocks: []hcl.BlockHeaderSchema{},
|
||||
}
|
||||
for k := range provisionerSpecs {
|
||||
buildSchema.Blocks = append(buildSchema.Blocks, hcl.BlockHeaderSchema{
|
||||
Type: k,
|
||||
})
|
||||
}
|
||||
|
||||
content, moreDiags := b.Remain.Content(buildSchema)
|
||||
diags = append(diags, moreDiags...)
|
||||
for _, block := range content.Blocks {
|
||||
provisioner, found := provisionerSpecs[block.Type]
|
||||
if !found {
|
||||
diags = append(diags, &hcl.Diagnostic{
|
||||
Summary: "Unknown " + buildProvisionnersLabel + " type",
|
||||
Subject: &block.LabelRanges[0],
|
||||
})
|
||||
continue
|
||||
}
|
||||
flatProvisinerCfg, moreDiags := decodeDecodable(block, nil, provisioner)
|
||||
diags = append(diags, moreDiags...)
|
||||
pg.Provisioners = append(pg.Provisioners, Provisioner{flatProvisinerCfg})
|
||||
}
|
||||
|
||||
return pg, diags
|
||||
}
|
||||
|
||||
func (pgs ProvisionerGroups) FirstCommunicatorRef() CommunicatorRef {
|
||||
if len(pgs) == 0 {
|
||||
return NoCommunicator
|
||||
}
|
||||
return pgs[0].CommunicatorRef
|
||||
}
|
|
@ -0,0 +1,88 @@
|
|||
package hcl2template
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/hashicorp/hcl/v2"
|
||||
"github.com/hashicorp/hcl/v2/hclsyntax"
|
||||
)
|
||||
|
||||
type Communicator struct {
|
||||
// Type of communicator; ex: ssh
|
||||
Type string
|
||||
// Given name
|
||||
Name string
|
||||
|
||||
Cfg interface{}
|
||||
|
||||
HCL2Ref HCL2Ref
|
||||
}
|
||||
|
||||
func (communicator *Communicator) Ref() CommunicatorRef {
|
||||
return CommunicatorRef{
|
||||
Type: communicator.Type,
|
||||
Name: communicator.Name,
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Parser) decodeCommunicatorConfig(block *hcl.Block) (*Communicator, hcl.Diagnostics) {
|
||||
|
||||
output := &Communicator{}
|
||||
output.Type = block.Labels[0]
|
||||
output.Name = block.Labels[1]
|
||||
output.HCL2Ref.DeclRange = block.DefRange
|
||||
|
||||
diags := hcl.Diagnostics{}
|
||||
|
||||
communicator, found := p.CommunicatorSchemas[output.Type]
|
||||
if !found {
|
||||
diags = append(diags, &hcl.Diagnostic{
|
||||
Severity: hcl.DiagError,
|
||||
Summary: "Unknown " + communicatorLabel + " type " + output.Type,
|
||||
Detail: "A " + communicatorLabel + " type must start with a letter and " +
|
||||
"may contain only letters, digits, underscores, and dashes.",
|
||||
Subject: &block.DefRange,
|
||||
})
|
||||
return output, diags
|
||||
}
|
||||
|
||||
flatCommunicator, moreDiags := decodeDecodable(block, nil, communicator)
|
||||
diags = append(diags, moreDiags...)
|
||||
output.Cfg = flatCommunicator
|
||||
|
||||
if !hclsyntax.ValidIdentifier(output.Name) {
|
||||
diags = append(diags, &hcl.Diagnostic{
|
||||
Severity: hcl.DiagError,
|
||||
Summary: "Invalid " + communicatorLabel + " name",
|
||||
Detail: "A " + communicatorLabel + " type must start with a letter and " +
|
||||
"may contain only letters, digits, underscores, and dashes.",
|
||||
Subject: &block.DefRange,
|
||||
})
|
||||
}
|
||||
|
||||
return output, diags
|
||||
}
|
||||
|
||||
type CommunicatorRef struct {
|
||||
Type string
|
||||
Name string
|
||||
}
|
||||
|
||||
// NoCommunicator is the zero value of CommunicatorRef, representing the
|
||||
// absense of Communicator.
|
||||
var NoCommunicator CommunicatorRef
|
||||
|
||||
func communicatorRefFromString(in string) CommunicatorRef {
|
||||
args := strings.Split(in, ".")
|
||||
if len(args) < 2 {
|
||||
return NoCommunicator
|
||||
}
|
||||
if len(args) > 2 {
|
||||
// comm.type.name
|
||||
args = args[1:]
|
||||
}
|
||||
return CommunicatorRef{
|
||||
Type: args[0],
|
||||
Name: args[1],
|
||||
}
|
||||
}
|
|
@ -0,0 +1,56 @@
|
|||
package hcl2template
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/hashicorp/hcl/v2"
|
||||
"github.com/hashicorp/hcl/v2/hcldec"
|
||||
"github.com/zclconf/go-cty/cty"
|
||||
"github.com/zclconf/go-cty/cty/gocty"
|
||||
)
|
||||
|
||||
type Decodable interface {
|
||||
FlatMapstructure() interface{}
|
||||
}
|
||||
|
||||
type SelfSpecified interface {
|
||||
HCL2Spec() map[string]hcldec.Spec
|
||||
}
|
||||
|
||||
func decodeDecodable(block *hcl.Block, ctx *hcl.EvalContext, dec Decodable) (interface{}, hcl.Diagnostics) {
|
||||
var diags hcl.Diagnostics
|
||||
|
||||
flatCfg := dec.FlatMapstructure()
|
||||
var spec hcldec.ObjectSpec
|
||||
if ss, selfSpecified := flatCfg.(SelfSpecified); selfSpecified {
|
||||
spec = hcldec.ObjectSpec(ss.HCL2Spec())
|
||||
} else {
|
||||
diags = append(diags, &hcl.Diagnostic{
|
||||
Summary: "Unknown type",
|
||||
Subject: &block.DefRange,
|
||||
Detail: fmt.Sprintf("Cannot get spec from a %T", flatCfg),
|
||||
})
|
||||
return nil, diags
|
||||
}
|
||||
val, moreDiags := hcldec.Decode(block.Body, spec, ctx)
|
||||
diags = append(diags, moreDiags...)
|
||||
|
||||
err := gocty.FromCtyValue(val, flatCfg)
|
||||
if err != nil {
|
||||
switch err := err.(type) {
|
||||
case cty.PathError:
|
||||
diags = append(diags, &hcl.Diagnostic{
|
||||
Summary: "gocty.FromCtyValue: " + err.Error(),
|
||||
Subject: &block.DefRange,
|
||||
Detail: fmt.Sprintf("%v", err.Path),
|
||||
})
|
||||
default:
|
||||
diags = append(diags, &hcl.Diagnostic{
|
||||
Summary: "gocty.FromCtyValue: " + err.Error(),
|
||||
Subject: &block.DefRange,
|
||||
Detail: fmt.Sprintf("%v", err),
|
||||
})
|
||||
}
|
||||
}
|
||||
return flatCfg, diags
|
||||
}
|
|
@ -0,0 +1,20 @@
|
|||
package hcl2template
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/hcl/v2"
|
||||
)
|
||||
|
||||
// HCL2Ref keeps track of where a configuration object was declared in
// the parsed HCL2 files, for use in diagnostics.
type HCL2Ref struct {
	// reference to the source definition in configuration text file
	DeclRange hcl.Range

	// remainder of unparsed body
	Remain hcl.Body
}

// func (hr *HCL2Ref) Blah() {
// // hr.Remain.
// ctyjson.Marshal(nil, nil)
// hr.DeclRange.
// }
|
|
@ -0,0 +1,165 @@
|
|||
package hcl2template
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/hcl/v2"
|
||||
"github.com/hashicorp/packer/template"
|
||||
)
|
||||
|
||||
// PackerConfig represents a loaded packer config
type PackerConfig struct {
	// Sources, indexed by their type.name reference.
	Sources map[SourceRef]*Source

	// Variables from v1-style `variables` blocks.
	Variables PackerV1Variables

	// Builds lists the parsed `build` blocks.
	Builds Builds

	// Communicators, indexed by their type.name reference.
	Communicators map[CommunicatorRef]*Communicator
}

// PackerV1Build mirrors the builders/provisioners/post-processors
// triple of a v1 template build.
type PackerV1Build struct {
	Builders       []*template.Builder
	Provisioners   []*template.Provisioner
	PostProcessors []*template.PostProcessor
}

// ToV1Build converts the parsed HCL2 config into a v1-shaped build. It
// currently only resolves each build's source and first communicator;
// the returned PackerV1Build is still empty.
//
// NOTE(review): diagnostics gathered here (e.g. unknown source
// references) are discarded on return — consider surfacing them to the
// caller.
func (pkrCfg *PackerConfig) ToV1Build() PackerV1Build {
	var diags hcl.Diagnostics
	res := PackerV1Build{}

	for _, build := range pkrCfg.Builds {
		// communicator of the first provisioner group, if any; the map
		// lookup is simply not-found for NoCommunicator.
		communicator, _ := pkrCfg.Communicators[build.ProvisionerGroups.FirstCommunicatorRef()]

		for _, from := range build.Froms {
			source, found := pkrCfg.Sources[from.Src]
			if !found {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Unknown " + sourceLabel + " reference",
					Detail:   "",
					Subject:  &from.HCL2Ref.DeclRange,
				})

				continue
			}

			// provisioners := build.ProvisionerGroups.FlatProvisioners()
			// postProcessors := build.PostProvisionerGroups.FlatProvisioners()

			_ = from
			_ = source
			_ = communicator
			// _ = provisioners
			// _ = postProcessors
		}
	}
	return res
}
|
||||
|
||||
func (pkrCfg *PackerConfig) ToTemplate() (*template.Template, error) {
|
||||
var result template.Template
|
||||
// var errs error
|
||||
|
||||
result.Comments = nil
|
||||
result.Variables = pkrCfg.Variables.Variables()
|
||||
// TODO(azr): add sensitive variables
|
||||
|
||||
builder := pkrCfg.ToV1Build()
|
||||
_ = builder
|
||||
|
||||
// // Gather all the post-processors
|
||||
// if len(r.PostProcessors) > 0 {
|
||||
// result.PostProcessors = make([][]*PostProcessor, 0, len(r.PostProcessors))
|
||||
// }
|
||||
// for i, v := range r.PostProcessors {
|
||||
// // Parse the configurations. We need to do this because post-processors
|
||||
// // can take three different formats.
|
||||
// configs, err := r.parsePostProcessor(i, v)
|
||||
// if err != nil {
|
||||
// errs = multierror.Append(errs, err)
|
||||
// continue
|
||||
// }
|
||||
|
||||
// // Parse the PostProcessors out of the configs
|
||||
// pps := make([]*PostProcessor, 0, len(configs))
|
||||
// for j, c := range configs {
|
||||
// var pp PostProcessor
|
||||
// if err := r.decoder(&pp, nil).Decode(c); err != nil {
|
||||
// errs = multierror.Append(errs, fmt.Errorf(
|
||||
// "post-processor %d.%d: %s", i+1, j+1, err))
|
||||
// continue
|
||||
// }
|
||||
|
||||
// // Type is required
|
||||
// if pp.Type == "" {
|
||||
// errs = multierror.Append(errs, fmt.Errorf(
|
||||
// "post-processor %d.%d: type is required", i+1, j+1))
|
||||
// continue
|
||||
// }
|
||||
|
||||
// // Set the raw configuration and delete any special keys
|
||||
// pp.Config = c
|
||||
|
||||
// // The name defaults to the type if it isn't set
|
||||
// if pp.Name == "" {
|
||||
// pp.Name = pp.Type
|
||||
// }
|
||||
|
||||
// delete(pp.Config, "except")
|
||||
// delete(pp.Config, "only")
|
||||
// delete(pp.Config, "keep_input_artifact")
|
||||
// delete(pp.Config, "type")
|
||||
// delete(pp.Config, "name")
|
||||
|
||||
// if len(pp.Config) == 0 {
|
||||
// pp.Config = nil
|
||||
// }
|
||||
|
||||
// pps = append(pps, &pp)
|
||||
// }
|
||||
|
||||
// result.PostProcessors = append(result.PostProcessors, pps)
|
||||
// }
|
||||
|
||||
// // Gather all the provisioners
|
||||
// if len(r.Provisioners) > 0 {
|
||||
// result.Provisioners = make([]*Provisioner, 0, len(r.Provisioners))
|
||||
// }
|
||||
// for i, v := range r.Provisioners {
|
||||
// var p Provisioner
|
||||
// if err := r.decoder(&p, nil).Decode(v); err != nil {
|
||||
// errs = multierror.Append(errs, fmt.Errorf(
|
||||
// "provisioner %d: %s", i+1, err))
|
||||
// continue
|
||||
// }
|
||||
|
||||
// // Type is required before any richer validation
|
||||
// if p.Type == "" {
|
||||
// errs = multierror.Append(errs, fmt.Errorf(
|
||||
// "provisioner %d: missing 'type'", i+1))
|
||||
// continue
|
||||
// }
|
||||
|
||||
// // Set the raw configuration and delete any special keys
|
||||
// p.Config = v.(map[string]interface{})
|
||||
|
||||
// delete(p.Config, "except")
|
||||
// delete(p.Config, "only")
|
||||
// delete(p.Config, "override")
|
||||
// delete(p.Config, "pause_before")
|
||||
// delete(p.Config, "type")
|
||||
// delete(p.Config, "timeout")
|
||||
|
||||
// if len(p.Config) == 0 {
|
||||
// p.Config = nil
|
||||
// }
|
||||
|
||||
// result.Provisioners = append(result.Provisioners, &p)
|
||||
// }
|
||||
|
||||
// // If we have errors, return those with a nil result
|
||||
// if errs != nil {
|
||||
// return nil, errs
|
||||
// }
|
||||
|
||||
return &result, nil
|
||||
}
|
|
@ -0,0 +1,113 @@
|
|||
package hcl2template
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/hashicorp/hcl/v2"
|
||||
)
|
||||
|
||||
// A source field in an HCL file will load into the Source type.
|
||||
//
|
||||
type Source struct {
|
||||
// Type of source; ex: virtualbox-iso
|
||||
Type string
|
||||
// Given name; if any
|
||||
Name string
|
||||
|
||||
Cfg interface{}
|
||||
|
||||
HCL2Ref HCL2Ref
|
||||
}
|
||||
|
||||
func (p *Parser) decodeSource(block *hcl.Block, sourceSpecs map[string]Decodable) (*Source, hcl.Diagnostics) {
|
||||
source := &Source{
|
||||
Type: block.Labels[0],
|
||||
Name: block.Labels[1],
|
||||
}
|
||||
source.HCL2Ref.DeclRange = block.DefRange
|
||||
|
||||
var diags hcl.Diagnostics
|
||||
|
||||
sourceSpec, found := sourceSpecs[source.Type]
|
||||
if !found {
|
||||
diags = append(diags, &hcl.Diagnostic{
|
||||
Summary: "Unknown " + sourceLabel + " type",
|
||||
Subject: &block.LabelRanges[0],
|
||||
})
|
||||
return source, diags
|
||||
}
|
||||
|
||||
flatSource, moreDiags := decodeDecodable(block, nil, sourceSpec)
|
||||
diags = append(diags, moreDiags...)
|
||||
source.Cfg = flatSource
|
||||
|
||||
return source, diags
|
||||
}
|
||||
|
||||
func (source *Source) Ref() SourceRef {
|
||||
return SourceRef{
|
||||
Type: source.Type,
|
||||
Name: source.Name,
|
||||
}
|
||||
}
|
||||
|
||||
type SourceRef struct {
|
||||
Type string
|
||||
Name string
|
||||
}
|
||||
|
||||
// NoSource is the zero value of sourceRef, representing the absense of an
|
||||
// source.
|
||||
var NoSource SourceRef
|
||||
|
||||
func sourceRefFromAbsTraversal(t hcl.Traversal) (SourceRef, hcl.Diagnostics) {
|
||||
var diags hcl.Diagnostics
|
||||
if len(t) != 3 {
|
||||
diags = append(diags, &hcl.Diagnostic{
|
||||
Severity: hcl.DiagError,
|
||||
Summary: "Invalid " + sourceLabel + " reference",
|
||||
Detail: "A " + sourceLabel + " reference must have three parts separated by periods: the keyword \"" + sourceLabel + "\", the builder type name, and the source name.",
|
||||
Subject: t.SourceRange().Ptr(),
|
||||
})
|
||||
return NoSource, diags
|
||||
}
|
||||
|
||||
if t.RootName() != sourceLabel {
|
||||
diags = append(diags, &hcl.Diagnostic{
|
||||
Severity: hcl.DiagError,
|
||||
Summary: "Invalid " + sourceLabel + " reference",
|
||||
Detail: "The first part of an source reference must be the keyword \"" + sourceLabel + "\".",
|
||||
Subject: t[0].SourceRange().Ptr(),
|
||||
})
|
||||
return NoSource, diags
|
||||
}
|
||||
btStep, ok := t[1].(hcl.TraverseAttr)
|
||||
if !ok {
|
||||
diags = append(diags, &hcl.Diagnostic{
|
||||
Severity: hcl.DiagError,
|
||||
Summary: "Invalid " + sourceLabel + " reference",
|
||||
Detail: "The second part of an " + sourceLabel + " reference must be an identifier giving the builder type of the " + sourceLabel + ".",
|
||||
Subject: t[1].SourceRange().Ptr(),
|
||||
})
|
||||
return NoSource, diags
|
||||
}
|
||||
nameStep, ok := t[2].(hcl.TraverseAttr)
|
||||
if !ok {
|
||||
diags = append(diags, &hcl.Diagnostic{
|
||||
Severity: hcl.DiagError,
|
||||
Summary: "Invalid " + sourceLabel + " reference",
|
||||
Detail: "The third part of an " + sourceLabel + " reference must be an identifier giving the name of the " + sourceLabel + ".",
|
||||
Subject: t[2].SourceRange().Ptr(),
|
||||
})
|
||||
return NoSource, diags
|
||||
}
|
||||
|
||||
return SourceRef{
|
||||
Type: btStep.Name,
|
||||
Name: nameStep.Name,
|
||||
}, diags
|
||||
}
|
||||
|
||||
func (r SourceRef) String() string {
|
||||
return fmt.Sprintf("%s.%s", r.Type, r.Name)
|
||||
}
|
|
@ -0,0 +1,27 @@
|
|||
package hcl2template
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/hcl/v2/gohcl"
|
||||
"github.com/hashicorp/hcl/v2"
|
||||
"github.com/hashicorp/packer/template"
|
||||
)
|
||||
|
||||
type PackerV1Variables map[string]string
|
||||
|
||||
// decodeConfig decodes a "variables" section the way packer 1 used to
|
||||
func (variables *PackerV1Variables) decodeConfig(block *hcl.Block) hcl.Diagnostics {
|
||||
return gohcl.DecodeBody(block.Body, nil, variables)
|
||||
}
|
||||
|
||||
func (variables PackerV1Variables) Variables() map[string]*template.Variable {
|
||||
res := map[string]*template.Variable{}
|
||||
|
||||
for k, v := range variables {
|
||||
res[k] = &template.Variable{
|
||||
Key: k,
|
||||
Default: v,
|
||||
}
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
|
@ -0,0 +1,156 @@
|
|||
package hcl2template
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// translateBuilder reads a packer v1 JSON config file at path and
// rewrites its "builders" and "post-processors" arrays into the v2
// "artifact" layout, writing the result next to the input as
// <path minus .pk.json>.v2.pk.json.
//
// It returns the path of the config to use afterwards: the translated
// file, or the input path unchanged when there is no v1 "builders"
// entry.
func translateBuilder(path string) (string, error) {

	type ConfigV1 map[string]json.RawMessage

	type ConfigV1V2 struct {
		Artifact map[string]map[string]json.RawMessage `json:"artifact"`
	}

	type Type struct {
		Name string `json:"name"`
		Type string `json:"type"`
	}
	type PostProcessor struct {
		Type   string   `json:"type"`
		Except []string `json:"except"`
		Only   []string `json:"only"`
	}

	b, err := ioutil.ReadFile(path)
	if err != nil {
		return "", err
	}
	c1 := ConfigV1{}
	if err := json.Unmarshal(b, &c1); err != nil {
		return "", err
	}
	c12 := ConfigV1V2{}
	if err := json.Unmarshal(b, &c12); err != nil {
		return "", err
	}
	// A pure-v1 config has no "artifact" section, leaving the map nil;
	// the writes below would then panic on a nil map.
	if c12.Artifact == nil {
		c12.Artifact = map[string]map[string]json.RawMessage{}
	}

	rawBuilder, found := c1["builders"]
	if !found {
		// no v1 builders
		return path, nil
	}

	// Decode the builders twice: once for their type/name, once raw so
	// the remaining fields can be copied over untouched.
	var tns []Type
	if err := json.Unmarshal([]byte(rawBuilder), &tns); err != nil {
		return "", err
	}
	var rawbuilders []json.RawMessage
	if err := json.Unmarshal([]byte(rawBuilder), &rawbuilders); err != nil {
		return "", err
	}

	var typePPs []PostProcessor
	var rawPPs []json.RawMessage
	if rawPP := c1["post-processors"]; len(rawPP) != 0 {
		if err := json.Unmarshal([]byte(rawPP), &typePPs); err != nil {
			return "", err
		}
		if err := json.Unmarshal([]byte(rawPP), &rawPPs); err != nil {
			return "", err
		}
	}

	for n, tn := range tns {
		builderName := tn.Type
		if tn.Name != "" {
			builderName = tn.Name
		}

		if c12.Artifact[tn.Type] == nil {
			c12.Artifact[tn.Type] = map[string]json.RawMessage{}
		}

		name := tn.Name
		if name == "" {
			name = fmt.Sprintf("autotranslated-builder-%d", len(c12.Artifact[tn.Type]))
		}
		if _, exists := c12.Artifact[tn.Type][name]; exists {
			return "", fmt.Errorf("%s-%s is defined in old and new config", tn.Type, name)
		}
		rawbuilder := rawbuilders[n]
		rawbuilder = removeKey(rawbuilder, "name", "only", "type")
		c12.Artifact[tn.Type][name] = rawbuilder

		for i, pp := range typePPs {
			// honor the only/except filters relative to this builder
			skip := false
			for _, except := range pp.Except {
				if except == builderName {
					skip = true
					break
				}
			}
			for _, only := range pp.Only {
				if only != builderName {
					skip = true
					break
				}
			}
			if skip {
				continue
			}
			if c12.Artifact[pp.Type] == nil {
				c12.Artifact[pp.Type] = map[string]json.RawMessage{}
			}
			name := fmt.Sprintf("autotranslated-post-processor-%d", len(c12.Artifact[pp.Type]))
			// was checking (and reporting) tn.Type instead of the
			// post-processor's own type it is stored under.
			if _, exists := c12.Artifact[pp.Type][name]; exists {
				return "", fmt.Errorf("%s-%s is defined in old and new config", pp.Type, name)
			}
			// splice a "source" attribute pointing at the originating
			// builder before stripping the reserved keys
			rawpp := rawPPs[i]
			rawpp = rawpp[:len(rawpp)-1]
			rawpp = append(rawpp, json.RawMessage(`,"source":"$artifacts.`+tn.Type+`.`+builderName+`"}`)...)
			rawpp = removeKey(rawpp, "name", "only", "type")
			c12.Artifact[pp.Type][name] = rawpp

			log.Printf("%s", rawpp)
		}

	}

	path = strings.TrimSuffix(path, ".json")
	path = strings.TrimSuffix(path, ".pk")
	path = path + ".v2.pk.json"

	file, err := os.Create(path)
	if err != nil {
		return "", err
	}
	defer file.Close()

	enc := json.NewEncoder(file)
	enc.SetIndent("", " ")

	return path, enc.Encode(c12)
}

// removeKey unmarshals the JSON object in, deletes the given top-level
// keys, and re-marshals it; it panics on invalid JSON input.
func removeKey(in json.RawMessage, keys ...string) json.RawMessage {
	m := map[string]json.RawMessage{}
	if err := json.Unmarshal(in, &m); err != nil {
		panic(err)
	}

	for _, key := range keys {
		delete(m, key)
	}

	b, err := json.Marshal(m)
	if err != nil {
		panic(err)
	}
	return b
}
|
Loading…
Reference in New Issue