Add some acceptance testing for Packer init commands (#10566)

Adrien Delorme 2021-02-05 14:06:28 +01:00 committed by GitHub
parent 0f34592daa
commit 692433721d
6 changed files with 388 additions and 0 deletions


@@ -122,6 +122,7 @@ func (c *InitCommand) RunContext(buildCtx context.Context, cla *InitArgs) int {
})
if err != nil {
c.Ui.Error(err.Error())
ret = 1
}
if newInstall != nil {
msg := fmt.Sprintf("Installed plugin %s %s in %q", pluginRequirement.Identifier.ForDisplay(), newInstall.Version, newInstall.BinaryPath)

command/init_test.go (new file, +212)

@@ -0,0 +1,212 @@
package command
import (
"io/ioutil"
"log"
"os"
"path/filepath"
"runtime"
"sort"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/hashicorp/packer-plugin-sdk/acctest"
"golang.org/x/mod/sumdb/dirhash"
)
func TestInitCommand_Run(t *testing.T) {
// These tests try to minimise the number of GitHub API requests while
// testing as many things at once as possible. Hopefully they don't require
// a GH token just yet. Acceptance tests run on linux, darwin and windows,
// so each request is made 3 times.
type testCase struct {
checkSkip func(*testing.T)
name string
inPluginFolder map[string]string
expectedPackerConfigDirHashBeforeInit string
hclFile string
packerConfigDir string
want int
dirFiles []string
expectedPackerConfigDirHashAfterInit string
}
cfg := &configDirSingleton{map[string]string{}}
tests := []testCase{
{
nil,
// Here we pre-write plugins with valid checksums; Packer will
// see those as valid installations it made itself.
// The directory hash before and after init should be the same:
// init is a no-op. This case also makes no GitHub query, so it
// is safe to always run it.
"already-installed-no-op",
map[string]string{
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_darwin_amd64": "1",
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_darwin_amd64_SHA256SUM": "6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b",
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_windows_amd64.exe": "1.exe",
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_windows_amd64.exe_SHA256SUM": "07d8453027192ee0c4120242e6e84e2ca2328b8e0f506e2f818a1a5b82790a0b",
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_linux_amd64": "1.out",
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_linux_amd64_SHA256SUM": "59031c50e0dfeedfde2b4e9445754804dce3f29e4efa737eead0ca9b4f5b85a5",
},
"h1:Q5qyAOdD43hL3CquQdVfaHpOYGf0UsZ/+wVA9Ry6cbA=",
`# cfg.pkr.hcl
packer {
required_plugins {
comment = {
source = "github.com/sylviamoss/comment"
version = "v0.2.018"
}
}
}`,
cfg.dir("1"),
0,
nil,
"h1:Q5qyAOdD43hL3CquQdVfaHpOYGf0UsZ/+wVA9Ry6cbA=",
},
{
func(t *testing.T) {
if os.Getenv(acctest.TestEnvVar) == "" {
t.Skipf("Acceptance test skipped unless env '%s' set", acctest.TestEnvVar)
}
},
// Here we pre-write plugins with valid checksums; Packer will
// see those as valid installations it made itself.
// But because the config requires version 0.2.19, init will upgrade.
"already-installed-upgrade",
map[string]string{
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_darwin_amd64": "1",
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_darwin_amd64_SHA256SUM": "6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b",
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_windows_amd64.exe": "1.exe",
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_windows_amd64.exe_SHA256SUM": "07d8453027192ee0c4120242e6e84e2ca2328b8e0f506e2f818a1a5b82790a0b",
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_linux_amd64": "1.out",
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_linux_amd64_SHA256SUM": "59031c50e0dfeedfde2b4e9445754804dce3f29e4efa737eead0ca9b4f5b85a5",
},
"h1:Q5qyAOdD43hL3CquQdVfaHpOYGf0UsZ/+wVA9Ry6cbA=",
`# cfg.pkr.hcl
packer {
required_plugins {
comment = {
source = "github.com/sylviamoss/comment"
version = "v0.2.019"
}
}
}`,
cfg.dir("2"),
0,
[]string{
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_darwin_amd64",
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_darwin_amd64_SHA256SUM",
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_linux_amd64",
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_linux_amd64_SHA256SUM",
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_windows_amd64.exe",
"github.com/sylviamoss/comment/packer-plugin-comment_v0.2.18_x5.0_windows_amd64.exe_SHA256SUM",
map[string]string{
"darwin": "github.com/sylviamoss/comment/packer-plugin-comment_v0.2.19_x5.0_darwin_amd64_SHA256SUM",
"linux": "github.com/sylviamoss/comment/packer-plugin-comment_v0.2.19_x5.0_linux_amd64_SHA256SUM",
"windows": "github.com/sylviamoss/comment/packer-plugin-comment_v0.2.19_x5.0_windows_amd64.exe_SHA256SUM",
}[runtime.GOOS],
map[string]string{
"darwin": "github.com/sylviamoss/comment/packer-plugin-comment_v0.2.19_x5.0_darwin_amd64",
"linux": "github.com/sylviamoss/comment/packer-plugin-comment_v0.2.19_x5.0_linux_amd64",
"windows": "github.com/sylviamoss/comment/packer-plugin-comment_v0.2.19_x5.0_windows_amd64.exe",
}[runtime.GOOS],
},
map[string]string{
"darwin": "h1:ORwcCYUx8z/5n/QvuTJo2vrgKpfJA4AxlNg1G9/BCDI=",
"linux": "h1:CGym0+Nd0LEANgzqL0wx/LDjRL8bYwlpZ0HajPJo/hs=",
"windows": "h1:ag0/C1YjP7KoEDYOiJHE0K+lhFgs0tVgjriWCXVT1fg=",
}[runtime.GOOS],
},
{
func(t *testing.T) {
if os.Getenv(acctest.TestEnvVar) == "" {
t.Skipf("Acceptance test skipped unless env '%s' set", acctest.TestEnvVar)
}
},
"release-with-no-binary",
nil,
"h1:47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=",
`# cfg.pkr.hcl
packer {
required_plugins {
comment = {
source = "github.com/sylviamoss/comment"
version = "v0.2.20"
}
}
}`,
cfg.dir("3"),
1,
nil,
"h1:47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if tt.checkSkip != nil {
tt.checkSkip(t)
if t.Skipped() {
return
}
}
log.Printf("starting %s", tt.name)
createFiles(tt.packerConfigDir, tt.inPluginFolder)
t.Cleanup(func() {
_ = os.RemoveAll(tt.packerConfigDir)
})
hash, err := dirhash.HashDir(tt.packerConfigDir, "", dirhash.DefaultHash)
if err != nil {
t.Fatalf("HashDir: %v", err)
}
if diff := cmp.Diff(tt.expectedPackerConfigDirHashBeforeInit, hash); diff != "" {
t.Errorf("unexpected dir hash before init: %s", diff)
}
cfgDir, err := ioutil.TempDir("", "pkr-test-init-file-folder")
if err != nil {
t.Fatalf("TempDir: %v", err)
}
if err := ioutil.WriteFile(filepath.Join(cfgDir, "cfg.pkr.hcl"), []byte(tt.hclFile), 0666); err != nil {
t.Fatalf("WriteFile: %v", err)
}
t.Cleanup(func() {
_ = os.RemoveAll(cfgDir)
})
args := []string{cfgDir}
c := &InitCommand{
Meta: testMetaFile(t),
}
c.CoreConfig.Components.PluginConfig.KnownPluginFolders = []string{tt.packerConfigDir}
if got := c.Run(args); got != tt.want {
t.Errorf("InitCommand.Run() = %v, want %v", got, tt.want)
}
if tt.dirFiles != nil {
dirFiles, err := dirhash.DirFiles(tt.packerConfigDir, "")
if err != nil {
t.Fatalf("DirFiles: %v", err)
}
sort.Strings(tt.dirFiles)
sort.Strings(dirFiles)
if diff := cmp.Diff(tt.dirFiles, dirFiles); diff != "" {
t.Errorf("found files differ: %v", diff)
}
}
hash, err = dirhash.HashDir(tt.packerConfigDir, "", dirhash.DefaultHash)
if err != nil {
t.Fatalf("HashDir: %v", err)
}
if diff := cmp.Diff(tt.expectedPackerConfigDirHashAfterInit, hash); diff != "" {
t.Errorf("unexpected dir hash after init: %s", diff)
}
})
}
}
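
The "h1:..." values embedded in the cases above are golang.org/x/mod dirhash digests of the plugin directory, computed exactly as the test does. A minimal sketch of how one could regenerate such a value for a fixture directory; the path and the standalone program are illustrative only and not part of this commit:

package main

import (
	"fmt"
	"log"

	"golang.org/x/mod/sumdb/dirhash"
)

func main() {
	// Same call the test makes: empty prefix, default "h1:" hash function.
	// The directory path below is illustrative only.
	h, err := dirhash.HashDir("/tmp/pkr-test-cfg-dir-1", "", dirhash.DefaultHash)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(h) // prints an "h1:..." digest like the expected values in the test
}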

command/utils_test.go (new file, +41)

@@ -0,0 +1,41 @@
package command
import (
"io/ioutil"
"log"
"os"
"path/filepath"
)
func mustString(s string, e error) string {
if e != nil {
panic(e)
}
return s
}
func createFiles(dir string, content map[string]string) {
for relPath, content := range content {
contentPath := filepath.Join(dir, relPath)
if err := os.MkdirAll(filepath.Dir(contentPath), 0777); err != nil {
panic(err)
}
if err := ioutil.WriteFile(contentPath, []byte(content), 0666); err != nil {
panic(err)
}
log.Printf("created tmp file: %s", contentPath)
}
}
type configDirSingleton struct {
dirs map[string]string
}
// Calling dir twice with the same key returns the same directory.
func (c *configDirSingleton) dir(key string) string {
if v, exists := c.dirs[key]; exists {
return v
}
c.dirs[key] = mustString(ioutil.TempDir("", "pkr-test-cfg-dir-"+key))
return c.dirs[key]
}
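
A minimal usage sketch of these helpers, as they could appear in another test in this package; the test name, plugin path and file content are illustrative only and not part of this commit:

func TestCreateFilesSketch(t *testing.T) {
	cfg := &configDirSingleton{map[string]string{}}
	// Repeated calls with the same key return the same temporary directory.
	dir := cfg.dir("sketch")
	t.Cleanup(func() { _ = os.RemoveAll(dir) })

	// createFiles creates any missing parent directories before writing.
	createFiles(dir, map[string]string{
		"github.com/example/foo/packer-plugin-foo_v0.1.0_x5.0_linux_amd64": "1",
	})
}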

go.mod (+1)

@@ -90,6 +90,7 @@ require (
github.com/zclconf/go-cty-yaml v1.0.1
golang.org/x/crypto v0.0.0-20201208171446-5f87f3452ae9
golang.org/x/mobile v0.0.0-20201208152944-da85bec010a2
golang.org/x/mod v0.3.0
golang.org/x/net v0.0.0-20201209123823-ac852fbbde11
golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a

vendor/golang.org/x/mod/sumdb/dirhash/hash.go (generated, vendored, new file, +132)

@@ -0,0 +1,132 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package dirhash defines hashes over directory trees.
// These hashes are recorded in go.sum files and in the Go checksum database,
// to allow verifying that a newly-downloaded module has the expected content.
package dirhash
import (
"archive/zip"
"crypto/sha256"
"encoding/base64"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"sort"
"strings"
)
// DefaultHash is the default hash function used in new go.sum entries.
var DefaultHash Hash = Hash1
// A Hash is a directory hash function.
// It accepts a list of files along with a function that opens the content of each file.
// It opens, reads, hashes, and closes each file and returns the overall directory hash.
type Hash func(files []string, open func(string) (io.ReadCloser, error)) (string, error)
// Hash1 is the "h1:" directory hash function, using SHA-256.
//
// Hash1 is "h1:" followed by the base64-encoded SHA-256 hash of a summary
// prepared as if by the Unix command:
//
// find . -type f | sort | sha256sum
//
// More precisely, the hashed summary contains a single line for each file in the list,
// ordered by sort.Strings applied to the file names, where each line consists of
// the hexadecimal SHA-256 hash of the file content,
// two spaces (U+0020), the file name, and a newline (U+000A).
//
// File names with newlines (U+000A) are disallowed.
func Hash1(files []string, open func(string) (io.ReadCloser, error)) (string, error) {
h := sha256.New()
files = append([]string(nil), files...)
sort.Strings(files)
for _, file := range files {
if strings.Contains(file, "\n") {
return "", errors.New("dirhash: filenames with newlines are not supported")
}
r, err := open(file)
if err != nil {
return "", err
}
hf := sha256.New()
_, err = io.Copy(hf, r)
r.Close()
if err != nil {
return "", err
}
fmt.Fprintf(h, "%x %s\n", hf.Sum(nil), file)
}
return "h1:" + base64.StdEncoding.EncodeToString(h.Sum(nil)), nil
}
// HashDir returns the hash of the local file system directory dir,
// replacing the directory name itself with prefix in the file names
// used in the hash function.
func HashDir(dir, prefix string, hash Hash) (string, error) {
files, err := DirFiles(dir, prefix)
if err != nil {
return "", err
}
osOpen := func(name string) (io.ReadCloser, error) {
return os.Open(filepath.Join(dir, strings.TrimPrefix(name, prefix)))
}
return hash(files, osOpen)
}
// DirFiles returns the list of files in the tree rooted at dir,
// replacing the directory name dir with prefix in each name.
// The resulting names always use forward slashes.
func DirFiles(dir, prefix string) ([]string, error) {
var files []string
dir = filepath.Clean(dir)
err := filepath.Walk(dir, func(file string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
rel := file
if dir != "." {
rel = file[len(dir)+1:]
}
f := filepath.Join(prefix, rel)
files = append(files, filepath.ToSlash(f))
return nil
})
if err != nil {
return nil, err
}
return files, nil
}
// HashZip returns the hash of the file content in the named zip file.
// Only the file names and their contents are included in the hash:
// the exact zip file format encoding, compression method,
// per-file modification times, and other metadata are ignored.
func HashZip(zipfile string, hash Hash) (string, error) {
z, err := zip.OpenReader(zipfile)
if err != nil {
return "", err
}
defer z.Close()
var files []string
zfiles := make(map[string]*zip.File)
for _, file := range z.File {
files = append(files, file.Name)
zfiles[file.Name] = file
}
zipOpen := func(name string) (io.ReadCloser, error) {
f := zfiles[name]
if f == nil {
return nil, fmt.Errorf("file %q not found in zip", name) // should never happen
}
return f.Open()
}
return hash(files, zipOpen)
}
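
Hash1 can also be exercised without touching the file system by supplying an in-memory open function. A minimal, self-contained sketch; the file names and contents are illustrative:

package main

import (
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"strings"

	"golang.org/x/mod/sumdb/dirhash"
)

func main() {
	files := []string{"a.txt", "b.txt"}
	contents := map[string]string{"a.txt": "hello\n", "b.txt": "world\n"}
	// open serves each "file" from memory instead of from disk.
	open := func(name string) (io.ReadCloser, error) {
		return ioutil.NopCloser(strings.NewReader(contents[name])), nil
	}
	sum, err := dirhash.Hash1(files, open)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(sum) // prints an "h1:..." digest over the sorted file list
}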

vendor/modules.txt (vendored, +1)

@@ -755,6 +755,7 @@ golang.org/x/mobile/event/key
# golang.org/x/mod v0.3.0
golang.org/x/mod/module
golang.org/x/mod/semver
golang.org/x/mod/sumdb/dirhash
# golang.org/x/net v0.0.0-20201209123823-ac852fbbde11
golang.org/x/net/context
golang.org/x/net/context/ctxhttp