Merge pull request #2205 from mitchellh/f-vtolstov-compress

Added new compress post-processor from Vasiliy Tolstov
This commit is contained in:
Chris Bednarski 2015-06-18 19:09:44 -07:00
commit 1edbbd80bf
14 changed files with 1069 additions and 62 deletions

36
builder/file/artifact.go Normal file
View File

@ -0,0 +1,36 @@
package file
import (
"fmt"
"log"
"os"
)
// FileArtifact is the artifact implementation produced by the file builder.
// It represents a single file on local disk.
type FileArtifact struct {
	filename string
}

// BuilderId returns the unique identifier of the builder that produced
// this artifact. (Receiver renamed from the anonymous form so every method
// on FileArtifact uses the same receiver name.)
func (a *FileArtifact) BuilderId() string {
	return BuilderId
}

// Files returns the single file backing this artifact.
func (a *FileArtifact) Files() []string {
	return []string{a.filename}
}

// Id returns a constant identifier; file artifacts carry no per-build id.
func (a *FileArtifact) Id() string {
	return "File"
}

// String describes the artifact for UI output.
func (a *FileArtifact) String() string {
	return fmt.Sprintf("Stored file: %s", a.filename)
}

// State returns builder-specific state; none is tracked for file artifacts.
func (a *FileArtifact) State(name string) interface{} {
	return nil
}

// Destroy removes the stored file from disk.
func (a *FileArtifact) Destroy() error {
	log.Printf("Deleting %s", a.filename)
	return os.Remove(a.filename)
}

View File

@ -0,0 +1,11 @@
package file
import (
"testing"
"github.com/mitchellh/packer/packer"
)
// TestNullArtifact is a compile-time check that FileArtifact satisfies the
// packer.Artifact interface.
// NOTE(review): the name says "Null" but it exercises FileArtifact — likely
// copied from another builder's test; consider renaming.
func TestNullArtifact(t *testing.T) {
	var _ packer.Artifact = new(FileArtifact)
}

77
builder/file/builder.go Normal file
View File

@ -0,0 +1,77 @@
package file
/*
The File builder creates an artifact from a file. Because it does not require
any virtualization or network resources, it's very fast and useful for testing.
*/
import (
"fmt"
"io"
"io/ioutil"
"os"
"github.com/mitchellh/multistep"
"github.com/mitchellh/packer/packer"
)
// BuilderId is the unique identifier reported by artifacts of this builder.
const BuilderId = "packer.file"

// Builder implements packer.Builder and produces an artifact from a local
// file, either by copying a source file or by writing inline content.
type Builder struct {
	config *Config // decoded configuration, set in Prepare
	// runner is used by Cancel. NOTE(review): nothing in this file assigns
	// it — verify Cancel is safe to call before/while Run executes.
	runner multistep.Runner
}
// Prepare decodes the raw configuration, storing the result on the builder.
// It returns any warnings produced during decoding and a non-nil error when
// the configuration is invalid.
func (b *Builder) Prepare(raws ...interface{}) ([]string, error) {
	cfg, warnings, err := NewConfig(raws...)
	if err != nil {
		return warnings, err
	}

	b.config = cfg
	return warnings, nil
}
// Run is where the actual build should take place. It takes a Build and a Ui.
// Run is where the actual build should take place. It takes a Build and a Ui.
// When a source file is configured it is copied to the target path; otherwise
// the configured content (possibly empty) is written to the target.
func (b *Builder) Run(ui packer.Ui, hook packer.Hook, cache packer.Cache) (packer.Artifact, error) {
	artifact := new(FileArtifact)

	if b.config.Source != "" {
		source, err := os.Open(b.config.Source)
		if err != nil {
			return nil, err
		}
		// Defer only after the error check: deferring before it would call
		// Close on a nil *os.File and panic when Open fails.
		defer source.Close()

		// Create will truncate an existing file
		target, err := os.Create(b.config.Target)
		if err != nil {
			return nil, err
		}
		defer target.Close()

		ui.Say(fmt.Sprintf("Copying %s to %s", source.Name(), target.Name()))
		bytes, err := io.Copy(target, source)
		if err != nil {
			return nil, err
		}
		ui.Say(fmt.Sprintf("Copied %d bytes", bytes))
		artifact.filename = target.Name()
	} else {
		// We're going to write Contents; if it's empty we'll just create an
		// empty file.
		err := ioutil.WriteFile(b.config.Target, []byte(b.config.Content), 0600)
		if err != nil {
			return nil, err
		}
		artifact.filename = b.config.Target
	}

	return artifact, nil
}
// Cancel cancels a possibly running Builder. This should block until
// the builder actually cancels and cleans up after itself.
// NOTE(review): b.runner is never assigned anywhere in this file; if Run
// never sets it, this dereferences a nil interface — confirm before relying
// on Cancel.
func (b *Builder) Cancel() {
	b.runner.Cancel()
}

View File

@ -0,0 +1,78 @@
package file
import (
"fmt"
"io/ioutil"
"testing"
builderT "github.com/mitchellh/packer/helper/builder/testing"
"github.com/mitchellh/packer/packer"
)
// TestBuilder_implBuilder is a compile-time check that Builder satisfies the
// packer.Builder interface.
func TestBuilder_implBuilder(t *testing.T) {
	var _ packer.Builder = new(Builder)
}

// TestBuilderFileAcc_content runs the acceptance harness with an inline
// "content" template and verifies the produced file via checkContent.
func TestBuilderFileAcc_content(t *testing.T) {
	builderT.Test(t, builderT.TestCase{
		Builder:  &Builder{},
		Template: fileContentTest,
		Check:    checkContent,
	})
}

// TestBuilderFileAcc_copy runs the acceptance harness with a "source"
// template that copies a fixture file, verified via checkCopy.
func TestBuilderFileAcc_copy(t *testing.T) {
	builderT.Test(t, builderT.TestCase{
		Builder:  &Builder{},
		Template: fileCopyTest,
		Check:    checkCopy,
	})
}
// checkContent asserts that the content acceptance test wrote the expected
// string to contentTest.txt in the working directory.
func checkContent(artifacts []packer.Artifact) error {
	raw, err := ioutil.ReadFile("contentTest.txt")
	if err != nil {
		return err
	}

	got := string(raw)
	if got != "hello world!" {
		return fmt.Errorf("Unexpected file contents: %s", got)
	}
	return nil
}
// checkCopy asserts that the copy acceptance test reproduced the fixture
// file's exact contents (including the trailing newline) in copyTest.txt.
func checkCopy(artifacts []packer.Artifact) error {
	content, err := ioutil.ReadFile("copyTest.txt")
	if err != nil {
		return err
	}
	contentString := string(content)
	if contentString != "Hello world.\n" {
		return fmt.Errorf("Unexpected file contents: %s", contentString)
	}
	return nil
}
const fileContentTest = `
{
"builders": [
{
"type":"test",
"target":"contentTest.txt",
"content":"hello world!"
}
]
}
`
const fileCopyTest = `
{
"builders": [
{
"type":"test",
"target":"copyTest.txt",
"source":"test-fixtures/artifact.txt"
}
]
}
`

56
builder/file/config.go Normal file
View File

@ -0,0 +1,56 @@
package file
import (
"fmt"
"github.com/mitchellh/packer/common"
"github.com/mitchellh/packer/helper/config"
"github.com/mitchellh/packer/packer"
"github.com/mitchellh/packer/template/interpolate"
)
// ErrTargetRequired is returned when no target file is configured.
var ErrTargetRequired = fmt.Errorf("target required")

// ErrContentSourceConflict is returned when both a source file and inline
// content are configured; the two are mutually exclusive.
var ErrContentSourceConflict = fmt.Errorf("Cannot specify source file AND content")

// Config is the decoded configuration for the file builder.
type Config struct {
	common.PackerConfig `mapstructure:",squash"`

	Source  string `mapstructure:"source"`  // path of an existing file to copy
	Target  string `mapstructure:"target"`  // path of the file to create (required)
	Content string `mapstructure:"content"` // literal content to write when no source is given
}
// NewConfig decodes and validates the file builder configuration. It returns
// the parsed config, any warnings, and a non-nil error when validation fails
// (missing target, or both source and content specified).
func NewConfig(raws ...interface{}) (*Config, []string, error) {
	c := new(Config)
	warnings := []string{}

	if err := config.Decode(c, &config.DecodeOpts{
		Interpolate: true,
		InterpolateFilter: &interpolate.RenderFilter{
			Exclude: []string{},
		},
	}, raws...); err != nil {
		return nil, warnings, err
	}

	var errs *packer.MultiError
	if c.Target == "" {
		errs = packer.MultiErrorAppend(errs, ErrTargetRequired)
	}
	if c.Content != "" && c.Source != "" {
		errs = packer.MultiErrorAppend(errs, ErrContentSourceConflict)
	}
	if c.Content == "" && c.Source == "" {
		warnings = append(warnings,
			"Both source file and contents are blank; target will have no content")
	}

	if errs != nil && len(errs.Errors) > 0 {
		return nil, warnings, errs
	}
	return c, warnings, nil
}

View File

@ -0,0 +1,45 @@
package file
import (
"strings"
"testing"
)
// testConfig returns a baseline raw configuration for the config tests.
// Note it deliberately sets both "source" and "content", which conflict.
func testConfig() map[string]interface{} {
	raw := make(map[string]interface{}, 3)
	raw["source"] = "src.txt"
	raw["target"] = "dst.txt"
	raw["content"] = "Hello, world!"
	return raw
}
// TestContentSourceConflict verifies that specifying both a source file and
// inline content is rejected.
func TestContentSourceConflict(t *testing.T) {
	raw := testConfig()

	_, _, errs := NewConfig(raw)
	// Guard against a nil errs before calling Error(): previously a passing
	// NewConfig would make the assertion below panic instead of fail.
	if errs == nil {
		t.Fatalf("Expected config error: %s", ErrContentSourceConflict.Error())
	}
	if !strings.Contains(errs.Error(), ErrContentSourceConflict.Error()) {
		t.Errorf("Expected config error: %s", ErrContentSourceConflict.Error())
	}
}
// TestNoFilename verifies that omitting the required "target" key produces a
// configuration error.
func TestNoFilename(t *testing.T) {
	raw := testConfig()
	// Bug fix: the config key is "target"; the test previously deleted a
	// nonexistent "filename" key, leaving the required field in place, so it
	// only passed because of the unrelated source/content conflict error.
	delete(raw, "target")
	delete(raw, "content") // avoid the source/content conflict masking the result

	_, _, errs := NewConfig(raw)
	if errs == nil {
		t.Errorf("Expected config error: %s", ErrTargetRequired.Error())
	}
}
// TestNoContent verifies that a config with neither source nor content still
// succeeds but produces a warning that the target will be empty.
func TestNoContent(t *testing.T) {
	raw := testConfig()
	delete(raw, "content")
	delete(raw, "source")
	_, warns, _ := NewConfig(raw)
	if len(warns) == 0 {
		t.Error("Expected config warning without any content")
	}
}

View File

@ -0,0 +1 @@
Hello world.

View File

@ -0,0 +1,15 @@
package main
import (
"github.com/mitchellh/packer/builder/file"
"github.com/mitchellh/packer/packer/plugin"
)
// main starts a plugin RPC server exposing the file builder to Packer core.
func main() {
	server, err := plugin.Server()
	if err != nil {
		panic(err)
	}

	builder := new(file.Builder)
	server.RegisterBuilder(builder)
	server.Serve()
}

View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014 Vasiliy Tolstov
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -10,6 +10,7 @@ const BuilderId = "packer.post-processor.compress"
type Artifact struct {
Path string
Provider string
files []string
}
func NewArtifact(provider, path string) *Artifact {
@ -19,26 +20,26 @@ func NewArtifact(provider, path string) *Artifact {
}
}
func (*Artifact) BuilderId() string {
func (a *Artifact) BuilderId() string {
return BuilderId
}
func (self *Artifact) Id() string {
func (*Artifact) Id() string {
return ""
}
func (self *Artifact) Files() []string {
return []string{self.Path}
func (a *Artifact) Files() []string {
return []string{a.Path}
}
func (self *Artifact) String() string {
return fmt.Sprintf("'%s' compressing: %s", self.Provider, self.Path)
func (a *Artifact) String() string {
return fmt.Sprintf("'%s' compressing: %s", a.Provider, a.Path)
}
func (*Artifact) State(name string) interface{} {
return nil
}
func (self *Artifact) Destroy() error {
return os.Remove(self.Path)
func (a *Artifact) Destroy() error {
return os.Remove(a.Path)
}

View File

@ -0,0 +1,197 @@
// +build ignore
package main
import (
"compress/flate"
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"os"
"runtime"
"testing"
"github.com/biogo/hts/bgzf"
"github.com/klauspost/pgzip"
"github.com/pierrec/lz4"
)
// Compressor pairs a source file (r) with a destination file (w) for the
// compression benchmarks, recording the final sizes of both in Close so the
// compressed/uncompressed sizes can be reported.
type Compressor struct {
	r *os.File // source file read by the writer benchmarks
	w *os.File // destination file; reread by the reader benchmarks
	sr int64   // size of r, captured in Close
	sw int64   // size of w, captured in Close
}
// Close records the final sizes of the written and source files and closes
// both handles. Previously the Stat errors were silently discarded (`fi, _`);
// they are now returned so a failed size capture is visible to the caller.
func (c *Compressor) Close() error {
	fi, err := c.w.Stat()
	if err != nil {
		return err
	}
	c.sw = fi.Size()
	if err = c.w.Close(); err != nil {
		return err
	}

	fi, err = c.r.Stat()
	if err != nil {
		return err
	}
	c.sr = fi.Size()
	return c.r.Close()
}
// NewCompressor opens src for reading and creates dst for writing, returning
// a Compressor wrapping both. No file handle is leaked on error: if creating
// dst fails, the already-opened src is closed before returning.
func NewCompressor(src, dst string) (*Compressor, error) {
	reader, err := os.Open(src)
	if err != nil {
		return nil, err
	}

	writer, err := os.Create(dst)
	if err != nil {
		reader.Close()
		return nil, err
	}

	return &Compressor{r: reader, w: writer}, nil
}
// main benchmarks four compressors (stdlib gzip, bgzf, pgzip, lz4) against
// the same input file /tmp/image.r, printing write time, read time and
// compressed size for each. For every algorithm the flow is: compress
// /tmp/image.r into /tmp/image.w, rewind the output file, then decompress it
// back. NOTE(review): sizes are read from c.sw after Close, and the input
// paths are hard-coded; this tool is build-tag-ignored and run manually.
func main() {
	runtime.GOMAXPROCS(runtime.NumCPU())
	var resw testing.BenchmarkResult
	var resr testing.BenchmarkResult

	// stdlib gzip
	c, err := NewCompressor("/tmp/image.r", "/tmp/image.w")
	if err != nil {
		panic(err)
	}
	resw = testing.Benchmark(c.BenchmarkGZIPWriter)
	c.w.Seek(0, 0) // rewind so the reader benchmark sees the compressed data
	resr = testing.Benchmark(c.BenchmarkGZIPReader)
	c.Close()
	fmt.Printf("gzip:\twriter %s\treader %s\tsize %d\n", resw.T.String(), resr.T.String(), c.sw)

	// bgzf (blocked gzip, parallel)
	c, err = NewCompressor("/tmp/image.r", "/tmp/image.w")
	if err != nil {
		panic(err)
	}
	resw = testing.Benchmark(c.BenchmarkBGZFWriter)
	c.w.Seek(0, 0)
	resr = testing.Benchmark(c.BenchmarkBGZFReader)
	c.Close()
	fmt.Printf("bgzf:\twriter %s\treader %s\tsize %d\n", resw.T.String(), resr.T.String(), c.sw)

	// pgzip (parallel gzip)
	c, err = NewCompressor("/tmp/image.r", "/tmp/image.w")
	if err != nil {
		panic(err)
	}
	resw = testing.Benchmark(c.BenchmarkPGZIPWriter)
	c.w.Seek(0, 0)
	resr = testing.Benchmark(c.BenchmarkPGZIPReader)
	c.Close()
	fmt.Printf("pgzip:\twriter %s\treader %s\tsize %d\n", resw.T.String(), resr.T.String(), c.sw)

	// lz4
	c, err = NewCompressor("/tmp/image.r", "/tmp/image.w")
	if err != nil {
		panic(err)
	}
	resw = testing.Benchmark(c.BenchmarkLZ4Writer)
	c.w.Seek(0, 0)
	resr = testing.Benchmark(c.BenchmarkLZ4Reader)
	c.Close()
	fmt.Printf("lz4:\twriter %s\treader %s\tsize %d\n", resw.T.String(), resr.T.String(), c.sw)
}
// BenchmarkGZIPWriter compresses c.r into c.w with stdlib gzip at BestSpeed.
// NOTE(review): the body does not loop over b.N, so testing.Benchmark times
// a single pass rather than a calibrated run; this applies to all the
// Benchmark* methods in this file.
func (c *Compressor) BenchmarkGZIPWriter(b *testing.B) {
	cw, _ := gzip.NewWriterLevel(c.w, flate.BestSpeed)
	b.ResetTimer()
	_, err := io.Copy(cw, c.r)
	if err != nil {
		b.Fatal(err)
	}
	cw.Close()
	c.w.Sync()
}

// BenchmarkGZIPReader decompresses c.w (written by the writer benchmark,
// rewound by main) and discards the output.
func (c *Compressor) BenchmarkGZIPReader(b *testing.B) {
	cr, _ := gzip.NewReader(c.w)
	b.ResetTimer()
	_, err := io.Copy(ioutil.Discard, cr)
	if err != nil {
		b.Fatal(err)
	}
}
// BenchmarkBGZFWriter compresses c.r into c.w with bgzf at BestSpeed using
// all CPUs. NOTE(review): unlike the gzip variant, the bgzf writer is not
// Closed here, only the file is Synced — verify the trailing block is flushed.
func (c *Compressor) BenchmarkBGZFWriter(b *testing.B) {
	cw, _ := bgzf.NewWriterLevel(c.w, flate.BestSpeed, runtime.NumCPU())
	b.ResetTimer()
	_, err := io.Copy(cw, c.r)
	if err != nil {
		b.Fatal(err)
	}
	c.w.Sync()
}

// BenchmarkBGZFReader decompresses c.w with bgzf and discards the output.
func (c *Compressor) BenchmarkBGZFReader(b *testing.B) {
	cr, _ := bgzf.NewReader(c.w, 0)
	b.ResetTimer()
	_, err := io.Copy(ioutil.Discard, cr)
	if err != nil {
		b.Fatal(err)
	}
}
// BenchmarkPGZIPWriter compresses c.r into c.w with parallel gzip (pgzip) at
// BestSpeed.
func (c *Compressor) BenchmarkPGZIPWriter(b *testing.B) {
	cw, _ := pgzip.NewWriterLevel(c.w, flate.BestSpeed)
	b.ResetTimer()
	_, err := io.Copy(cw, c.r)
	if err != nil {
		b.Fatal(err)
	}
	cw.Close()
	c.w.Sync()
}

// BenchmarkPGZIPReader decompresses c.w with pgzip and discards the output.
func (c *Compressor) BenchmarkPGZIPReader(b *testing.B) {
	cr, _ := pgzip.NewReader(c.w)
	b.ResetTimer()
	_, err := io.Copy(ioutil.Discard, cr)
	if err != nil {
		b.Fatal(err)
	}
}
// BenchmarkLZ4Writer compresses c.r into c.w with lz4, checksums disabled.
func (c *Compressor) BenchmarkLZ4Writer(b *testing.B) {
	cw := lz4.NewWriter(c.w)
	// cw.Header.HighCompression = true
	cw.Header.NoChecksum = true
	b.ResetTimer()
	_, err := io.Copy(cw, c.r)
	if err != nil {
		b.Fatal(err)
	}
	cw.Close()
	c.w.Sync()
}

// BenchmarkLZ4Reader decompresses c.w with lz4 and discards the output.
func (c *Compressor) BenchmarkLZ4Reader(b *testing.B) {
	cr := lz4.NewReader(c.w)
	b.ResetTimer()
	_, err := io.Copy(ioutil.Discard, cr)
	if err != nil {
		b.Fatal(err)
	}
}

View File

@ -2,97 +2,317 @@ package compress
import (
"archive/tar"
"archive/zip"
"compress/gzip"
"fmt"
"io"
"os"
"path/filepath"
"regexp"
"runtime"
"github.com/klauspost/pgzip"
"github.com/mitchellh/packer/common"
"github.com/mitchellh/packer/helper/config"
"github.com/mitchellh/packer/packer"
"github.com/mitchellh/packer/template/interpolate"
"github.com/pierrec/lz4"
)
type Config struct {
common.PackerConfig `mapstructure:",squash"`
// Fields from config file
OutputPath string `mapstructure:"output"`
CompressionLevel int `mapstructure:"compression_level"`
KeepInputArtifact bool `mapstructure:"keep_input_artifact"`
ctx interpolate.Context
// Derived fields
Archive string
Algorithm string
ctx *interpolate.Context
}
type PostProcessor struct {
config Config
config *Config
}
func (self *PostProcessor) Configure(raws ...interface{}) error {
err := config.Decode(&self.config, &config.DecodeOpts{
var (
// ErrInvalidCompressionLevel is returned when the compression level passed
// to gzip is not in the expected range. See compress/flate for details.
ErrInvalidCompressionLevel = fmt.Errorf(
"Invalid compression level. Expected an integer from -1 to 9.")
ErrWrongInputCount = fmt.Errorf(
"Can only have 1 input file when not using tar/zip")
filenamePattern = regexp.MustCompile(`(?:\.([a-z0-9]+))`)
)
func (p *PostProcessor) Configure(raws ...interface{}) error {
err := config.Decode(&p.config, &config.DecodeOpts{
Interpolate: true,
InterpolateFilter: &interpolate.RenderFilter{
Exclude: []string{},
},
}, raws...)
errs := new(packer.MultiError)
// If there is no explicit number of Go threads to use, then set it
if os.Getenv("GOMAXPROCS") == "" {
runtime.GOMAXPROCS(runtime.NumCPU())
}
if p.config.OutputPath == "" {
p.config.OutputPath = "packer_{{.BuildName}}_{{.Provider}}"
}
if err = interpolate.Validate(p.config.OutputPath, p.config.ctx); err != nil {
errs = packer.MultiErrorAppend(
errs, fmt.Errorf("Error parsing target template: %s", err))
}
templates := map[string]*string{
"output": &p.config.OutputPath,
}
if p.config.CompressionLevel > pgzip.BestCompression {
p.config.CompressionLevel = pgzip.BestCompression
}
// Technically 0 means "don't compress" but I don't know how to
// differentiate between "user entered zero" and "user entered nothing".
// Also, why bother creating a compressed file with zero compression?
if p.config.CompressionLevel == -1 || p.config.CompressionLevel == 0 {
p.config.CompressionLevel = pgzip.DefaultCompression
}
for key, ptr := range templates {
if *ptr == "" {
errs = packer.MultiErrorAppend(
errs, fmt.Errorf("%s must be set", key))
}
*ptr, err = interpolate.Render(p.config.OutputPath, p.config.ctx)
if err != nil {
return err
errs = packer.MultiErrorAppend(
errs, fmt.Errorf("Error processing %s: %s", key, err))
}
}
p.config.detectFromFilename()
if len(errs.Errors) > 0 {
return errs
}
return nil
}
func (self *PostProcessor) PostProcess(ui packer.Ui, artifact packer.Artifact) (packer.Artifact, bool, error) {
ui.Say(fmt.Sprintf("Creating archive for '%s'", artifact.BuilderId()))
func (p *PostProcessor) PostProcess(ui packer.Ui, artifact packer.Artifact) (packer.Artifact, bool, error) {
// Create the compressed archive file at the appropriate OutputPath.
fw, err := os.Create(self.config.OutputPath)
target := p.config.OutputPath
keep := p.config.KeepInputArtifact
newArtifact := &Artifact{Path: target}
outputFile, err := os.Create(target)
if err != nil {
return nil, false, fmt.Errorf(
"Failed creating file for compressed archive: %s", self.config.OutputPath)
"Unable to create archive %s: %s", target, err)
}
defer fw.Close()
defer outputFile.Close()
gw := gzip.NewWriter(fw)
defer gw.Close()
// Setup output interface. If we're using compression, output is a
// compression writer. Otherwise it's just a file.
var output io.WriteCloser
switch p.config.Algorithm {
case "lz4":
ui.Say(fmt.Sprintf("Using lz4 compression with %d cores for %s",
runtime.GOMAXPROCS(-1), target))
output, err = makeLZ4Writer(outputFile, p.config.CompressionLevel)
defer output.Close()
case "pgzip":
ui.Say(fmt.Sprintf("Using pgzip compression with %d cores for %s",
runtime.GOMAXPROCS(-1), target))
output, err = makePgzipWriter(outputFile, p.config.CompressionLevel)
defer output.Close()
default:
output = outputFile
}
// Iterate through all of the artifact's files and put them into the
// compressed archive using the tar/gzip writers.
for _, path := range artifact.Files() {
fi, err := os.Stat(path)
compression := p.config.Algorithm
if compression == "" {
compression = "no compression"
}
// Build an archive, if we're supposed to do that.
switch p.config.Archive {
case "tar":
ui.Say(fmt.Sprintf("Tarring %s with %s", target, compression))
err = createTarArchive(artifact.Files(), output)
if err != nil {
return nil, false, fmt.Errorf(
"Failed stating file: %s", path)
return nil, keep, fmt.Errorf("Error creating tar: %s", err)
}
target, _ := os.Readlink(path)
header, err := tar.FileInfoHeader(fi, target)
case "zip":
ui.Say(fmt.Sprintf("Zipping %s", target))
err = createZipArchive(artifact.Files(), output)
if err != nil {
return nil, false, fmt.Errorf(
"Failed creating archive header: %s", path)
return nil, keep, fmt.Errorf("Error creating zip: %s", err)
}
tw := tar.NewWriter(gw)
defer tw.Close()
// Write the header first to the archive. This takes partial data
// from the FileInfo that is grabbed by running the stat command.
if err := tw.WriteHeader(header); err != nil {
return nil, false, fmt.Errorf(
"Failed writing archive header: %s", path)
default:
// Filename indicates no tarball (just compress) so we'll do an io.Copy
// into our compressor.
if len(artifact.Files()) != 1 {
return nil, keep, fmt.Errorf(
"Can only have 1 input file when not using tar/zip. Found %d "+
"files: %v", len(artifact.Files()), artifact.Files())
}
archiveFile := artifact.Files()[0]
ui.Say(fmt.Sprintf("Archiving %s with %s", archiveFile, compression))
// Open the target file for archiving and compressing.
fr, err := os.Open(path)
source, err := os.Open(archiveFile)
if err != nil {
return nil, false, fmt.Errorf(
"Failed opening file '%s' to write compressed archive.", path)
return nil, keep, fmt.Errorf(
"Failed to open source file %s for reading: %s",
archiveFile, err)
}
defer fr.Close()
defer source.Close()
if _, err = io.Copy(tw, fr); err != nil {
return nil, false, fmt.Errorf(
"Failed copying file to archive: %s", path)
if _, err = io.Copy(output, source); err != nil {
return nil, keep, fmt.Errorf("Failed to compress %s: %s",
archiveFile, err)
}
}
return NewArtifact(artifact.BuilderId(), self.config.OutputPath), false, nil
ui.Say(fmt.Sprintf("Archive %s completed", target))
return newArtifact, keep, nil
}
// detectFromFilename infers the archive format and compression algorithm
// from the extension(s) of OutputPath. Recognized: .tar, .zip, .gz (pgzip),
// .lz4; anything else (including no extension) falls back to a pgzip
// compressed tarball. Receiver renamed from `config` to `c` — the old name
// shadowed the imported helper/config package. The redundant trailing
// `return` was also dropped.
func (c *Config) detectFromFilename() {
	extensions := map[string]string{
		"tar": "tar",
		"zip": "zip",
		"gz":  "pgzip",
		"lz4": "lz4",
	}

	result := filenamePattern.FindAllStringSubmatch(c.OutputPath, -1)

	// No dots. Bail out with defaults.
	if len(result) == 0 {
		c.Algorithm = "pgzip"
		c.Archive = "tar"
		return
	}

	// Parse the last two ".ext" groups, if they're there.
	lastItem := result[len(result)-1][1]
	var nextToLastItem string
	if len(result) > 1 {
		nextToLastItem = result[len(result)-2][1]
	}

	// Should we make an archive? E.g. tar or zip?
	if nextToLastItem == "tar" {
		c.Archive = "tar"
	}
	if lastItem == "zip" || lastItem == "tar" {
		c.Archive = lastItem
		// Tar or zip is our final artifact. Bail out.
		return
	}

	// Should we compress the artifact?
	if algorithm, ok := extensions[lastItem]; ok {
		c.Algorithm = algorithm
		// We found our compression algorithm. Bail out.
		return
	}

	// We didn't match a known compression format. Default to tar + pgzip.
	c.Algorithm = "pgzip"
	c.Archive = "tar"
}
// makeLZ4Writer wraps output in an lz4 writer. lz4 has no numeric levels, so
// any compression_level above gzip.DefaultCompression coarsely maps to lz4's
// high-compression mode. The error return is always nil; the signature is
// kept symmetric with makePgzipWriter.
func makeLZ4Writer(output io.WriteCloser, compressionLevel int) (io.WriteCloser, error) {
	lzwriter := lz4.NewWriter(output)
	if compressionLevel > gzip.DefaultCompression {
		lzwriter.Header.HighCompression = true
	}
	return lzwriter, nil
}
// makePgzipWriter wraps output in a parallel-gzip writer at the requested
// compression level, returning ErrInvalidCompressionLevel when pgzip rejects
// the level. Concurrency: 500 KB blocks across all available cores.
func makePgzipWriter(output io.WriteCloser, compressionLevel int) (io.WriteCloser, error) {
	gzipWriter, err := pgzip.NewWriterLevel(output, compressionLevel)
	if err != nil {
		return nil, ErrInvalidCompressionLevel
	}
	gzipWriter.SetConcurrency(500000, runtime.GOMAXPROCS(-1))
	return gzipWriter, nil
}
func createTarArchive(files []string, output io.WriteCloser) error {
archive := tar.NewWriter(output)
defer archive.Close()
for _, path := range files {
file, err := os.Open(path)
if err != nil {
return fmt.Errorf("Unable to read file %s: %s", path, err)
}
defer file.Close()
fi, err := file.Stat()
if err != nil {
return fmt.Errorf("Unable to get fileinfo for %s: %s", path, err)
}
header, err := tar.FileInfoHeader(fi, path)
if err != nil {
return fmt.Errorf("Failed to create tar header for %s: %s", path, err)
}
if err := archive.WriteHeader(header); err != nil {
return fmt.Errorf("Failed to write tar header for %s: %s", path, err)
}
if _, err := io.Copy(archive, file); err != nil {
return fmt.Errorf("Failed to copy %s data to archive: %s", path, err)
}
}
return nil
}
func createZipArchive(files []string, output io.WriteCloser) error {
archive := zip.NewWriter(output)
defer archive.Close()
for _, path := range files {
path = filepath.ToSlash(path)
source, err := os.Open(path)
if err != nil {
return fmt.Errorf("Unable to read file %s: %s", path, err)
}
defer source.Close()
target, err := archive.Create(path)
if err != nil {
return fmt.Errorf("Failed to add zip header for %s: %s", path, err)
}
_, err = io.Copy(target, source)
if err != nil {
return fmt.Errorf("Failed to copy %s data to archive: %s", path, err)
}
}
return nil
}

View File

@ -1,3 +1,216 @@
package compress
import ()
import (
"compress/gzip"
"fmt"
"io/ioutil"
"os"
"strings"
"testing"
"github.com/mitchellh/packer/builder/file"
env "github.com/mitchellh/packer/helper/builder/testing"
"github.com/mitchellh/packer/packer"
"github.com/mitchellh/packer/template"
)
// TestDetectFilename covers the four extension-detection cases of
// Config.detectFromFilename: no extension (defaults), archive-only (.zip),
// compression-only (.lz4), and archive+compression with extra dots
// (.tar.lz4 at the end of a multi-dot name).
func TestDetectFilename(t *testing.T) {
	// Test default / fallback with no file extension
	nakedFilename := Config{OutputPath: "test"}
	nakedFilename.detectFromFilename()
	if nakedFilename.Archive != "tar" {
		t.Error("Expected to find tar archive setting")
	}
	if nakedFilename.Algorithm != "pgzip" {
		t.Error("Expected to find pgzip algorithm setting")
	}

	// Test .archive
	zipFilename := Config{OutputPath: "test.zip"}
	zipFilename.detectFromFilename()
	if zipFilename.Archive != "zip" {
		t.Error("Expected to find zip archive setting")
	}
	if zipFilename.Algorithm != "" {
		t.Error("Expected to find empty algorithm setting")
	}

	// Test .compress
	lz4Filename := Config{OutputPath: "test.lz4"}
	lz4Filename.detectFromFilename()
	if lz4Filename.Archive != "" {
		t.Error("Expected to find empty archive setting")
	}
	if lz4Filename.Algorithm != "lz4" {
		t.Error("Expected to find lz4 algorithm setting")
	}

	// Test .archive.compress with some.extra.dots...
	lotsOfDots := Config{OutputPath: "test.blah.bloo.blee.tar.lz4"}
	lotsOfDots.detectFromFilename()
	if lotsOfDots.Archive != "tar" {
		t.Error("Expected to find tar archive setting")
	}
	if lotsOfDots.Algorithm != "lz4" {
		t.Error("Expected to find lz4 algorithm setting")
	}
}
const expectedFileContents = "Hello world!"
func TestSimpleCompress(t *testing.T) {
const config = `
{
"post-processors": [
{
"type": "compress",
"output": "package.tar.gz"
}
]
}
`
artifact := testArchive(t, config)
defer artifact.Destroy()
fi, err := os.Stat("package.tar.gz")
if err != nil {
t.Errorf("Unable to read archive: %s", err)
}
if fi.IsDir() {
t.Error("Archive should not be a directory")
}
}
func TestZipArchive(t *testing.T) {
const config = `
{
"post-processors": [
{
"type": "compress",
"output": "package.zip"
}
]
}
`
artifact := testArchive(t, config)
defer artifact.Destroy()
// Verify things look good
_, err := os.Stat("package.zip")
if err != nil {
t.Errorf("Unable to read archive: %s", err)
}
}
func TestTarArchive(t *testing.T) {
const config = `
{
"post-processors": [
{
"type": "compress",
"output": "package.tar"
}
]
}
`
artifact := testArchive(t, config)
defer artifact.Destroy()
// Verify things look good
_, err := os.Stat("package.tar")
if err != nil {
t.Errorf("Unable to read archive: %s", err)
}
}
func TestCompressOptions(t *testing.T) {
const config = `
{
"post-processors": [
{
"type": "compress",
"output": "package.gz",
"compression_level": 9
}
]
}
`
artifact := testArchive(t, config)
defer artifact.Destroy()
filename := "package.gz"
archive, _ := os.Open(filename)
gzipReader, _ := gzip.NewReader(archive)
data, _ := ioutil.ReadAll(gzipReader)
if string(data) != expectedFileContents {
t.Errorf("Expected:\n%s\nFound:\n%s\n", expectedFileContents, data)
}
}
// Test Helpers
// setup bootstraps an acceptance test: it runs the file builder with a fixed
// inline-content template to produce package.txt ("Hello world!") and returns
// the test UI and the resulting artifact for the compress post-processor to
// consume. Any configuration warning is treated as a fatal setup error.
func setup(t *testing.T) (packer.Ui, packer.Artifact, error) {
	// Create fake UI and Cache
	ui := packer.TestUi(t)
	cache := &packer.FileCache{CacheDir: os.TempDir()}

	// Create config for file builder
	const fileConfig = `{"builders":[{"type":"file","target":"package.txt","content":"Hello world!"}]}`
	tpl, err := template.Parse(strings.NewReader(fileConfig))
	if err != nil {
		return nil, nil, fmt.Errorf("Unable to parse setup configuration: %s", err)
	}

	// Prepare the file builder
	builder := file.Builder{}
	warnings, err := builder.Prepare(tpl.Builders["file"].Config)
	if len(warnings) > 0 {
		// The first warning aborts setup; warnings here indicate a broken
		// fixture rather than an interesting test condition.
		for _, warn := range warnings {
			return nil, nil, fmt.Errorf("Configuration warning: %s", warn)
		}
	}
	if err != nil {
		return nil, nil, fmt.Errorf("Invalid configuration: %s", err)
	}

	// Run the file builder
	artifact, err := builder.Run(ui, nil, cache)
	if err != nil {
		return nil, nil, fmt.Errorf("Failed to build artifact: %s", err)
	}

	return ui, artifact, err
}
// testArchive builds a test artifact with the file builder, then runs the
// compress post-processor over it using the given JSON config and returns the
// compressed artifact. The test is skipped unless the acceptance-test env var
// is set.
func testArchive(t *testing.T, config string) packer.Artifact {
	if os.Getenv(env.TestEnvVar) == "" {
		t.Skip(fmt.Sprintf(
			"Acceptance tests skipped unless env '%s' set", env.TestEnvVar))
	}

	ui, artifact, err := setup(t)
	if err != nil {
		t.Fatalf("Error bootstrapping test: %s", err)
	}
	if artifact != nil {
		defer artifact.Destroy()
	}

	tpl, err := template.Parse(strings.NewReader(config))
	if err != nil {
		t.Fatalf("Unable to parse test config: %s", err)
	}

	compressor := PostProcessor{}
	// Bug fix: the Configure error was previously ignored, so an invalid
	// post-processor config silently fell through to PostProcess.
	if err := compressor.Configure(tpl.PostProcessors[0][0].Config); err != nil {
		t.Fatalf("Failed to configure post-processor: %s", err)
	}

	artifactOut, _, err := compressor.PostProcess(ui, artifact)
	if err != nil {
		t.Fatalf("Failed to compress artifact: %s", err)
	}

	return artifactOut
}

View File

@ -2,7 +2,7 @@
layout: "docs"
page_title: "compress Post-Processor"
description: |-
The Packer compress post-processor takes an artifact with files (such as from VMware or VirtualBox) and gzip compresses the artifact into a single archive.
The Packer compress post-processor takes an artifact with files (such as from VMware or VirtualBox) and compresses the artifact into a single archive.
---
# Compress Post-Processor
@ -10,22 +10,58 @@ description: |-
Type: `compress`
The Packer compress post-processor takes an artifact with files (such as from
VMware or VirtualBox) and gzip compresses the artifact into a single
archive.
VMware or VirtualBox) and compresses the artifact into a single archive.
## Configuration
The configuration for this post-processor is extremely simple.
### Required:
* `output` (string) - The path to save the compressed archive.
You must specify the output filename. The archive format is derived from the filename.
## Example
* `output` (string) - The path to save the compressed archive. The archive
format is inferred from the filename. E.g. `.tar.gz` will be a gzipped
tarball. `.zip` will be a zip file. If the extension can't be detected packer
defaults to `.tar.gz` behavior but will not change the filename.
An example is shown below, showing only the post-processor configuration:
If you are executing multiple builders in parallel you should make sure
`output` is unique for each one. For example `packer_{{.BuildName}}_{{.Provider}}.zip`.
```javascript
### Optional:
If you want more control over how the archive is created you can specify the following settings:
* `compression_level` (integer) - Specify the compression level, for algorithms
that support it, from 1 through 9 inclusive. Typically higher compression
levels take longer but produce smaller files. Defaults to `6`
* `keep_input_artifact` (bool) - Keep source files; defaults to `false`
### Supported Formats
Supported file extensions include `.zip`, `.tar`, `.gz`, `.tar.gz`, `.lz4` and `.tar.lz4`. Note that `.gz` and `.lz4` will fail if you have multiple files to compress.
## Examples
Some minimal examples are shown below, showing only the post-processor configuration:
```json
{
"type": "compress",
"output": "foo.tar.gz"
"output": "archive.tar.lz4"
}
```
```json
{
"type": "compress",
"output": "archive.zip"
}
```
```json
{
"type": "compress",
"output": "archive.gz",
"compression": 9
}
```