HCL2: add post-processors block to run multiple post-processors after a build (#9638)

Added a `post-processors` block to run chained post-processors after a build.
Before this, defining multiple `post-processor` blocks after the provisioning
steps ran them sequentially; now each of them starts from the build's artifact.
To chain post-processors you now have to define them inside a
`post-processors` block.

This is a breaking change.
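For illustration only (this sketch is not part of the diff, and `source.file.example` is a placeholder name), the template below shows both behaviors; it is modeled on the test fixture added in this commit:

```hcl
build {
  sources = ["source.file.example"] # placeholder source name

  # Top-level post-processors: after this change, each of these starts
  # from the build's artifact, independently of the other.
  post-processor "manifest" {
  }
  post-processor "shell-local" {
    inline = ["echo 'runs on the build artifact'"]
  }

  # Chained post-processors: each one takes the artifact of the previous
  # one; only the first starts from the build's artifact.
  post-processors {
    post-processor "manifest" {
    }
    post-processor "shell-local" {
      inline = ["echo 'runs after manifest'"]
    }
  }
}
```

Templates that relied on the old sequential behavior need to wrap their `post-processor` blocks in a single `post-processors` block to keep chaining.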
Adrien Delorme 2020-07-28 10:02:37 +02:00 committed by GitHub
parent 882fcd9367
commit c7b35dd6bc
12 changed files with 566 additions and 81 deletions

@ -1,5 +1,13 @@
## 1.6.1 (Upcoming)

### BACKWARDS INCOMPATIBILITIES:

* only for HCL: added `post-processors` block to run chained post-processors
  after a build. Before this, defining multiple `post-processor` blocks after
  provisioning steps would run them sequentially; now doing this makes them
  start from the build artifact. To chain post-processors you now have to
  define them in a `post-processors` block. [GH-9638]

### FEATURES:

* **New post-processor** Yandex Import [GH-9553]

@ -84,7 +84,15 @@ Use it at will.

  post-processors:

    0:
      manifest

    1:
      shell-local

    2:
      manifest
      shell-local
`},
}

@ -26,4 +26,15 @@ EOF
post-processor "manifest" { post-processor "manifest" {
} }
post-processor "shell-local" {
}
post-processors {
post-processor "manifest" {
}
post-processor "shell-local" {
}
}
}

@ -12,17 +12,19 @@ build {
    "source.file.vanilla"
  ]

  post-processors {
    post-processor "shell-local" {
      name = "apple"
      inline = [ "echo apple 'apple'" ]
    }
    post-processor "shell-local" {
      name = "pear"
      inline = [ "echo apple 'pear'" ]
    }
    post-processor "shell-local" {
      name = "banana"
    }
  }
}

@ -213,4 +213,208 @@ build {
nested_slice {
}
}
post-processors {
post-processor "amazon-import" {
name = "first-nested-post-processor"
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
not_squashed = "${var.foo} ${upper(build.ID)}"
nested {
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
}
nested_slice {
}
}
post-processor "amazon-import" {
name = "second-nested-post-processor"
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
not_squashed = "${var.foo} ${upper(build.ID)}"
nested {
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
}
nested_slice {
}
}
}
post-processors {
post-processor "amazon-import" {
name = "third-nested-post-processor"
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
not_squashed = "${var.foo} ${upper(build.ID)}"
nested {
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
}
nested_slice {
}
}
post-processor "amazon-import" {
name = "fourth-nested-post-processor"
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
not_squashed = "${var.foo} ${upper(build.ID)}"
nested {
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
}
nested_slice {
}
}
}
}

@ -14,6 +14,8 @@ const (
    buildProvisionerLabel    = "provisioner"
    buildPostProcessorLabel  = "post-processor"
    buildPostProcessorsLabel = "post-processors"
)

@ -22,6 +24,13 @@ var buildSchema = &hcl.BodySchema{
        {Type: sourceLabel, LabelNames: []string{"reference"}},
        {Type: buildProvisionerLabel, LabelNames: []string{"type"}},
        {Type: buildPostProcessorLabel, LabelNames: []string{"type"}},
        {Type: buildPostProcessorsLabel, LabelNames: []string{}},
    },
}

var postProcessorsSchema = &hcl.BodySchema{
    Blocks: []hcl.BlockHeaderSchema{
        {Type: buildPostProcessorLabel, LabelNames: []string{"type"}},
    },
}
@ -49,9 +58,10 @@ type BuildBlock struct {
    // will be run against the sources.
    ProvisionerBlocks []*ProvisionerBlock

    // PostProcessorsLists references a list of lists of HCL post-processor
    // blocks that will be run against the artifacts from the provisioning
    // steps.
    PostProcessorsLists [][]*PostProcessorBlock

    HCL2Ref HCL2Ref
}
@ -125,7 +135,29 @@ func (p *Parser) decodeBuildConfig(block *hcl.Block) (*BuildBlock, hcl.Diagnosti
            if moreDiags.HasErrors() {
                continue
            }
            build.PostProcessorsLists = append(build.PostProcessorsLists, []*PostProcessorBlock{pp})
        case buildPostProcessorsLabel:
            content, moreDiags := block.Body.Content(postProcessorsSchema)
            diags = append(diags, moreDiags...)
            if moreDiags.HasErrors() {
                continue
            }
            errored := false
            postProcessors := []*PostProcessorBlock{}
            for _, block := range content.Blocks {
                pp, moreDiags := p.decodePostProcessor(block)
                diags = append(diags, moreDiags...)
                if moreDiags.HasErrors() {
                    errored = true
                    break
                }
                postProcessors = append(postProcessors, pp)
            }
            if errored == false {
                build.PostProcessorsLists = append(build.PostProcessorsLists, postProcessors)
            }
        }
    }

@ -34,9 +34,11 @@ func TestParse_build(t *testing.T) {
PType: "file",
},
},
PostProcessorsLists: [][]*PostProcessorBlock{
{
{
PType: "amazon-import",
},
},
},
},
@ -139,22 +141,30 @@
SourceRef{Type: "amazon-ebs", Name: "ubuntu-1604", LocalName: "aws-ubuntu-16.04"},
},
ProvisionerBlocks: nil,
PostProcessorsLists: [][]*PostProcessorBlock{
{
{
PType: "amazon-import",
OnlyExcept: OnlyExcept{Only: []string{"virtualbox-iso.ubuntu-1204"}, Except: nil},
},
},
{
{
PType: "manifest",
OnlyExcept: OnlyExcept{Only: nil, Except: []string{"virtualbox-iso.ubuntu-1204"}},
},
},
{
{
PType: "amazon-import",
OnlyExcept: OnlyExcept{Only: []string{"amazon-ebs.aws-ubuntu-16.04"}, Except: nil},
},
},
{
{
PType: "manifest",
OnlyExcept: OnlyExcept{Only: nil, Except: []string{"amazon-ebs.aws-ubuntu-16.04"}},
},
},
},
},
@ -180,6 +190,8 @@
},
},
},
},
{
{
PType: "manifest",
PostProcessor: &HCL2PostProcessor{
@ -212,6 +224,8 @@
},
},
},
},
{
{
PType: "amazon-import",
PostProcessor: &HCL2PostProcessor{

@ -254,41 +254,47 @@ func (cfg *PackerConfig) getCoreBuildProvisioners(source SourceBlock, blocks []*
// getCoreBuildPostProcessors takes a list of post-processor blocks, starts
// the corresponding post-processors and sends parsed HCL2 over to them.
func (cfg *PackerConfig) getCoreBuildPostProcessors(source SourceBlock, blocksList [][]*PostProcessorBlock, ectx *hcl.EvalContext) ([][]packer.CoreBuildPostProcessor, hcl.Diagnostics) {
    var diags hcl.Diagnostics
    res := [][]packer.CoreBuildPostProcessor{}
    for _, blocks := range blocksList {
        pps := []packer.CoreBuildPostProcessor{}
        for _, ppb := range blocks {
            if ppb.OnlyExcept.Skip(source.String()) {
                continue
            }

            name := ppb.PName
            if name == "" {
                name = ppb.PType
            }
            // -except
            exclude := false
            for _, exceptGlob := range cfg.except {
                if exceptGlob.Match(name) {
                    exclude = true
                    break
                }
            }
            if exclude {
                break
            }

            postProcessor, moreDiags := cfg.startPostProcessor(source, ppb, ectx)
            diags = append(diags, moreDiags...)
            if moreDiags.HasErrors() {
                continue
            }

            pps = append(pps, packer.CoreBuildPostProcessor{
                PostProcessor: postProcessor,
                PName: ppb.PName,
                PType: ppb.PType,
                KeepInputArtifact: ppb.KeepInputArtifact,
            })
        }
        if len(pps) > 0 {
            res = append(res, pps)
        }
    }

    return res, diags
@ -387,11 +393,7 @@ func (cfg *PackerConfig) GetBuilds(opts packer.GetBuildsOptions) ([]packer.Build
            if moreDiags.HasErrors() {
                continue
            }
            pps, moreDiags := cfg.getCoreBuildPostProcessors(src, build.PostProcessorsLists, cfg.EvalContext(variables))
            diags = append(diags, moreDiags...)
            if moreDiags.HasErrors() {
                continue
@ -500,16 +502,19 @@ func (p *PackerConfig) printBuilds() string {
            }
            fmt.Fprintf(out, " %s\n", str)
        }
        fmt.Fprintf(out, "\n post-processors:\n")
        if len(build.PostProcessorsLists) == 0 {
            fmt.Fprintf(out, "\n <no post-processor>\n")
        }
        for i, ppList := range build.PostProcessorsLists {
            fmt.Fprintf(out, "\n %d:\n", i)
            for _, pp := range ppList {
                str := pp.PType
                if pp.PName != "" {
                    str = strings.Join([]string{pp.PType, pp.PName}, ".")
                }
                fmt.Fprintf(out, " %s\n", str)
            }
        }
    }
    return out.String()

@ -89,14 +89,38 @@ func TestParser_complete(t *testing.T) {
},
{PType: "file"},
},
PostProcessorsLists: [][]*PostProcessorBlock{
{
{
PType: "amazon-import",
PName: "something",
KeepInputArtifact: pTrue,
},
},
{
{
PType: "amazon-import",
},
},
{
{
PType: "amazon-import",
PName: "first-nested-post-processor",
},
{
PType: "amazon-import",
PName: "second-nested-post-processor",
},
},
{
{
PType: "amazon-import",
PName: "third-nested-post-processor",
},
{
PType: "amazon-import",
PName: "fourth-nested-post-processor",
},
},
},
},
@ -133,6 +157,8 @@
},
KeepInputArtifact: pTrue,
},
},
{
{
PType: "amazon-import",
PostProcessor: &HCL2PostProcessor{
@ -140,6 +166,38 @@
},
},
},
{
{
PType: "amazon-import",
PName: "first-nested-post-processor",
PostProcessor: &HCL2PostProcessor{
PostProcessor: basicMockPostProcessor,
},
},
{
PType: "amazon-import",
PName: "second-nested-post-processor",
PostProcessor: &HCL2PostProcessor{
PostProcessor: basicMockPostProcessor,
},
},
},
{
{
PType: "amazon-import",
PName: "third-nested-post-processor",
PostProcessor: &HCL2PostProcessor{
PostProcessor: basicMockPostProcessor,
},
},
{
PType: "amazon-import",
PName: "fourth-nested-post-processor",
PostProcessor: &HCL2PostProcessor{
PostProcessor: basicMockPostProcessor,
},
},
},
},
},
&packer.CoreBuild{
@ -180,6 +238,8 @@
},
KeepInputArtifact: pTrue,
},
},
{
{
PType: "amazon-import",
PostProcessor: &HCL2PostProcessor{
@ -187,6 +247,38 @@
},
},
},
{
{
PType: "amazon-import",
PName: "first-nested-post-processor",
PostProcessor: &HCL2PostProcessor{
PostProcessor: basicMockPostProcessor,
},
},
{
PType: "amazon-import",
PName: "second-nested-post-processor",
PostProcessor: &HCL2PostProcessor{
PostProcessor: basicMockPostProcessor,
},
},
},
{
{
PType: "amazon-import",
PName: "third-nested-post-processor",
PostProcessor: &HCL2PostProcessor{
PostProcessor: basicMockPostProcessor,
},
},
{
PType: "amazon-import",
PName: "fourth-nested-post-processor",
PostProcessor: &HCL2PostProcessor{
PostProcessor: basicMockPostProcessor,
},
},
},
},
},
},

@ -14,7 +14,12 @@ export default [
    content: [
      {
        category: 'build',
        content: [
          'source',
          'provisioner',
          'post-processor',
          'post-processors',
        ],
      },
      'locals',
      'source',

@ -15,21 +15,29 @@ The `post-processor` block defines how a post-processor is configured.

```hcl
# builds.pkr.hcl
build {
  # ... build image

  post-processor "checksum" { # checksum image
    checksum_types = [ "md5", "sha512" ] # checksum the artifact
    keep_input_artifact = true # keep the artifact
  }
  post-processor "amazon-import" { # upload image to Amazon
  }
}
```

Each `post-processor` runs after each defined build. A post-processor takes
the `Artifact` from a build. Post-processors are optional, and they can be
used to upload artifacts, re-package, or more. The list of available
post-processors can be found in the [post-processors](/docs/post-processors)
section.

A `post-processor` can also take the `Artifact` from another post-processor
when it is defined in a [`post-processors`
block](/docs/from-1.5/blocks/build/post-processor) list, that is, a list of
chained post-processors.

-> Note: The input 'artifact' received by a post-processor will be
automatically deleted by default.
# Keep an input artifact # Keep an input artifact
@ -74,5 +82,14 @@ The values within `only` or `except` are _source names_, not builder types.
## Build Contextual Variables

Packer allows you to access connection information and basic instance state
information from a post-processor. This information is stored in the `build`
variable. Check out the [Contextual
Variables](/docs/from-1.5/contextual-variables) documentation to learn more
and see some examples of how to use them.
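As a hedged illustration (not part of the committed docs, and the source name below is a placeholder), `build.ID` is one contextual variable exercised by this commit's test fixtures and can be interpolated inside a post-processor:

```hcl
build {
  sources = ["source.file.example"] # placeholder source name

  post-processor "shell-local" {
    # Packer expands build.ID before the command runs.
    inline = ["echo 'post-processing build ${build.ID}'"]
  }
}
```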
### Related

* The [`post-processors` block](/docs/from-1.5/blocks/build/post-processor)
  allows you to define one or more chains of `post-processor`s that will take
  the output from the build and provisioning steps.

@ -0,0 +1,87 @@
---
description: >
  The post-processors block allows you to define lists of post-processors to
  apply to an artifact.
layout: docs
page_title: post-processors - build - Blocks
sidebar_title: <tt>post-processors</tt>
---
# The `post-processors` block
`@include 'from-1.5/beta-hcl2-note.mdx'`
The `post-processors` block allows you to define lists of
[`post-processor`s](/docs/from-1.5/blocks/build/post-processor) that will run
from the artifact of each build.
```hcl
# builds.pkr.hcl
build {
# ...
post-processors {
post-processor "shell-local" { # create an artifice.txt file containing "hello"
inline = [ "echo hello > artifice.txt" ]
}
post-processor "artifice" { # tell packer this is now the new artifact
files = ["artifice.txt"]
}
post-processor "checksum" { # checksum artifice.txt
checksum_types = [ "md5", "sha512" ] # checksum the artifact
keep_input_artifact = true # keep the artifact
}
}
}
```
The [`post-processor` block](/docs/from-1.5/blocks/build/post-processor)
allows you to define multiple post-processors that will each run from the
`Artifact` of a build. Read the `post-processor` documentation to learn how to
use a post-processor.
### Difference between a `post-processor` and a `post-processors` block

These two templates do the same thing:
```hcl
# builds.pkr.hcl
build {
# ... build image
post-processor "checksum" { # checksum image
checksum_types = [ "md5", "sha512" ] # checksum the artifact
}
post-processor "amazon-import" { # upload image to AWS
}
post-processor "googlecompute-import" { # upload image to GCP
}
}
```
```hcl
# builds.pkr.hcl
build {
# ... build image
post-processors {
post-processor "checksum" { # checksum image
checksum_types = [ "md5", "sha512" ] # checksum the artifact
}
}
post-processors {
post-processor "amazon-import" { # upload image to AWS
}
}
post-processors {
post-processor "googlecompute-import" { # upload image to GCP
}
}
}
```
Each of these `post-processors` blocks will start after each build, that is,
after every provisioning step has run on each source. In all cases the source
image is going to be deleted.
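To keep the image around anyway, the `keep_input_artifact` option shown in the examples above asks Packer not to delete the input artifact of that post-processor. A minimal sketch, assuming the same chains as above:

```hcl
build {
  # ... build image

  post-processors {
    post-processor "checksum" { # checksum image
      checksum_types = [ "md5", "sha512" ]
      keep_input_artifact = true # keep the artifact
    }
  }
  post-processors {
    post-processor "amazon-import" { # upload image to AWS
    }
  }
}
```

Which artifacts survive in the end still depends on the rest of the template, so treat this as a starting point rather than a guarantee.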