Merge pull request #8625 from hashicorp/remove_ansible_proxy

Remove ansible proxy
This commit is contained in:
Megan Marsh 2020-04-03 09:23:10 -07:00 committed by GitHub
commit 8840b4a830
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 829 additions and 103 deletions

View File

@ -63,6 +63,7 @@ func PopulateProvisionHookData(state multistep.StateBag) map[string]interface{}
hookData["ConnType"] = commConf.Type
hookData["SSHPublicKey"] = string(commConf.SSHPublicKey)
hookData["SSHPrivateKey"] = string(commConf.SSHPrivateKey)
hookData["SSHPrivateKeyFile"] = commConf.SSHPrivateKeyFile
// Backwards compatibility; in practice, WinRMPassword is fulfilled by
// Password.

View File

@ -0,0 +1,31 @@
// +build !windows
package ansible
import (
"fmt"
"github.com/hashicorp/packer/packer"
)
// provisionLogicTracker records which of the Provisioner's pluggable hooks
// were invoked during a Provision run. Tests assign its methods to
// Provisioner.setupAdapterFunc / executeAnsibleFunc to observe the control
// flow without running a real adapter or ansible-playbook.
type provisionLogicTracker struct {
	setupAdapterCalled   bool // set to true when setupAdapter is invoked
	executeAnsibleCalled bool // set to true when executeAnsible is invoked
	happyPath            bool // controls whether the stubbed calls succeed or fail
}
// setupAdapter is a test stub matching Provisioner.setupAdapterFunc. It
// records that it was called, then returns a canned private-key string on
// the happy path or an error otherwise.
func (l *provisionLogicTracker) setupAdapter(ui packer.Ui, comm packer.Communicator) (string, error) {
	l.setupAdapterCalled = true
	if !l.happyPath {
		return "", fmt.Errorf("chose sadpath")
	}
	return "fakeKeyString", nil
}
// executeAnsible is a test stub matching Provisioner.executeAnsibleFunc. It
// records that it was called and, consistently with setupAdapter, succeeds
// on the happy path and errors on the sad path.
//
// NOTE: the original had the condition inverted (happyPath returned an
// error); no visible caller asserts on this return value, only on
// executeAnsibleCalled, so restoring consistency with setupAdapter is safe.
func (l *provisionLogicTracker) executeAnsible(ui packer.Ui, comm packer.Communicator, privKeyFile string) error {
	l.executeAnsibleCalled = true
	if l.happyPath {
		return nil
	}
	return fmt.Errorf("chose sadpath")
}

View File

@ -63,10 +63,14 @@ type Config struct {
UseSFTP bool `mapstructure:"use_sftp"`
InventoryDirectory string `mapstructure:"inventory_directory"`
InventoryFile string `mapstructure:"inventory_file"`
KeepInventoryFile bool `mapstructure:"keep_inventory_file"`
GalaxyFile string `mapstructure:"galaxy_file"`
GalaxyCommand string `mapstructure:"galaxy_command"`
GalaxyForceInstall bool `mapstructure:"galaxy_force_install"`
RolesPath string `mapstructure:"roles_path"`
//TODO: change default to false in v1.6.0.
UseProxy config.Trilean `mapstructure:"use_proxy"`
userWasEmpty bool
}
type Provisioner struct {
@ -76,6 +80,9 @@ type Provisioner struct {
ansibleVersion string
ansibleMajVersion uint
generatedData map[string]interface{}
setupAdapterFunc func(ui packer.Ui, comm packer.Communicator) (string, error)
executeAnsibleFunc func(ui packer.Ui, comm packer.Communicator, privKeyFile string) error
}
// ConfigSpec returns the HCL2 object spec derived from the provisioner's
// flattened mapstructure config, for decoding HCL2 template blocks.
func (p *Provisioner) ConfigSpec() hcldec.ObjectSpec { return p.config.FlatMapstructure().HCL2Spec() }
@ -163,6 +170,7 @@ func (p *Provisioner) Prepare(raws ...interface{}) error {
}
if p.config.User == "" {
p.config.userWasEmpty = true
usr, err := user.Current()
if err != nil {
errs = packer.MultiErrorAppend(errs, err)
@ -174,6 +182,16 @@ func (p *Provisioner) Prepare(raws ...interface{}) error {
errs = packer.MultiErrorAppend(errs, fmt.Errorf("user: could not determine current user from environment."))
}
// These fields exist so that we can replace the functions for testing
// logic inside of the Provision func; in actual use, these don't ever
// need to get set.
if p.setupAdapterFunc == nil {
p.setupAdapterFunc = p.setupAdapter
}
if p.executeAnsibleFunc == nil {
p.executeAnsibleFunc = p.executeAnsible
}
if errs != nil && len(errs.Errors) > 0 {
return errs
}
@ -207,40 +225,17 @@ func (p *Provisioner) getVersion() error {
return nil
}
func (p *Provisioner) Provision(ctx context.Context, ui packer.Ui, comm packer.Communicator, generatedData map[string]interface{}) error {
ui.Say("Provisioning with Ansible...")
// Interpolate env vars to check for generated values like password and port
p.generatedData = generatedData
p.config.ctx.Data = generatedData
for i, envVar := range p.config.AnsibleEnvVars {
envVar, err := interpolate.Render(envVar, &p.config.ctx)
if err != nil {
return fmt.Errorf("Could not interpolate ansible env vars: %s", err)
}
p.config.AnsibleEnvVars[i] = envVar
}
// Interpolate extra vars to check for generated values like password and port
for i, arg := range p.config.ExtraArguments {
arg, err := interpolate.Render(arg, &p.config.ctx)
if err != nil {
return fmt.Errorf("Could not interpolate ansible env vars: %s", err)
}
p.config.ExtraArguments[i] = arg
}
func (p *Provisioner) setupAdapter(ui packer.Ui, comm packer.Communicator) (string, error) {
ui.Message("Setting up proxy adapter for Ansible....")
k, err := newUserKey(p.config.SSHAuthorizedKeyFile)
if err != nil {
return err
return "", err
}
hostSigner, err := newSigner(p.config.SSHHostKeyFile)
if err != nil {
return fmt.Errorf("error creating host signer: %s", err)
}
// Remove the private key file
if len(k.privKeyFile) > 0 {
defer os.Remove(k.privKeyFile)
return "", fmt.Errorf("error creating host signer: %s", err)
}
keyChecker := ssh.CertChecker{
@ -298,7 +293,7 @@ func (p *Provisioner) Provision(ctx context.Context, ui packer.Ui, comm packer.C
}()
if err != nil {
return err
return "", err
}
ui = &packer.SafeUi{
@ -307,50 +302,185 @@ func (p *Provisioner) Provision(ctx context.Context, ui packer.Ui, comm packer.C
}
p.adapter = adapter.NewAdapter(p.done, localListener, config, p.config.SFTPCmd, ui, comm)
defer func() {
log.Print("shutting down the SSH proxy")
close(p.done)
p.adapter.Shutdown()
}()
return k.privKeyFile, nil
}
go p.adapter.Serve()
// Default inventory-line templates, interpolated with the HostAlias, Host,
// User and Port generated data. v1/v2 refer to the detected Ansible major
// version (v1 uses the legacy ansible_ssh_* variable names). The WinRM
// variant is selected when use_proxy is false and the communicator is winrm.
const DefaultSSHInventoryFilev2 = "{{ .HostAlias }} ansible_host={{ .Host }} ansible_user={{ .User }} ansible_port={{ .Port }}\n"
const DefaultSSHInventoryFilev1 = "{{ .HostAlias }} ansible_ssh_host={{ .Host }} ansible_ssh_user={{ .User }} ansible_ssh_port={{ .Port }}\n"
const DefaultWinRMInventoryFilev2 = "{{ .HostAlias}} ansible_host={{ .Host }} ansible_connection=winrm ansible_winrm_transport=basic ansible_shell_type=powershell ansible_user={{ .User}} ansible_port={{ .Port }}\n"
if len(p.config.InventoryFile) == 0 {
tf, err := ioutil.TempFile(p.config.InventoryDirectory, "packer-provisioner-ansible")
if err != nil {
return fmt.Errorf("Error preparing inventory file: %s", err)
}
defer os.Remove(tf.Name())
host := fmt.Sprintf("%s ansible_host=127.0.0.1 ansible_user=%s ansible_port=%d\n",
p.config.HostAlias, p.config.User, p.config.LocalPort)
if p.ansibleMajVersion < 2 {
host = fmt.Sprintf("%s ansible_ssh_host=127.0.0.1 ansible_ssh_user=%s ansible_ssh_port=%d\n",
p.config.HostAlias, p.config.User, p.config.LocalPort)
}
w := bufio.NewWriter(tf)
w.WriteString(host)
for _, group := range p.config.Groups {
fmt.Fprintf(w, "[%s]\n%s", group, host)
}
for _, group := range p.config.EmptyGroups {
fmt.Fprintf(w, "[%s]\n", group)
}
if err := w.Flush(); err != nil {
tf.Close()
return fmt.Errorf("Error preparing inventory file: %s", err)
}
tf.Close()
p.config.InventoryFile = tf.Name()
defer func() {
p.config.InventoryFile = ""
}()
func (p *Provisioner) createInventoryFile() error {
log.Printf("Creating inventory file for Ansible run...")
tf, err := ioutil.TempFile(p.config.InventoryDirectory, "packer-provisioner-ansible")
if err != nil {
return fmt.Errorf("Error preparing inventory file: %s", err)
}
if err := p.executeAnsible(ui, comm, k.privKeyFile); err != nil {
// figure out which inventory line template to use
hostTemplate := DefaultSSHInventoryFilev2
if p.ansibleMajVersion < 2 {
hostTemplate = DefaultSSHInventoryFilev1
}
if p.config.UseProxy.False() && p.generatedData["ConnType"] == "winrm" {
hostTemplate = DefaultWinRMInventoryFilev2
}
// interpolate template to generate host with necessary vars.
ctxData := p.generatedData
ctxData["HostAlias"] = p.config.HostAlias
ctxData["User"] = p.config.User
if !p.config.UseProxy.False() {
ctxData["Host"] = "127.0.0.1"
ctxData["Port"] = p.config.LocalPort
}
p.config.ctx.Data = ctxData
host, err := interpolate.Render(hostTemplate, &p.config.ctx)
if err != nil {
return fmt.Errorf("Error generating inventory file from template: %s", err)
}
w := bufio.NewWriter(tf)
w.WriteString(host)
for _, group := range p.config.Groups {
fmt.Fprintf(w, "[%s]\n%s", group, host)
}
for _, group := range p.config.EmptyGroups {
fmt.Fprintf(w, "[%s]\n", group)
}
if err := w.Flush(); err != nil {
tf.Close()
os.Remove(tf.Name())
return fmt.Errorf("Error preparing inventory file: %s", err)
}
tf.Close()
p.config.InventoryFile = tf.Name()
return nil
}
func (p *Provisioner) Provision(ctx context.Context, ui packer.Ui, comm packer.Communicator, generatedData map[string]interface{}) error {
ui.Say("Provisioning with Ansible...")
// Interpolate env vars to check for generated values like password and port
p.generatedData = generatedData
p.config.ctx.Data = generatedData
for i, envVar := range p.config.AnsibleEnvVars {
envVar, err := interpolate.Render(envVar, &p.config.ctx)
if err != nil {
return fmt.Errorf("Could not interpolate ansible env vars: %s", err)
}
p.config.AnsibleEnvVars[i] = envVar
}
// Interpolate extra vars to check for generated values like password and port
for i, arg := range p.config.ExtraArguments {
arg, err := interpolate.Render(arg, &p.config.ctx)
if err != nil {
return fmt.Errorf("Could not interpolate ansible env vars: %s", err)
}
p.config.ExtraArguments[i] = arg
}
// Set up proxy if host IP is missing or communicator type is wrong.
if p.config.UseProxy.False() {
hostIP := generatedData["Host"].(string)
if hostIP == "" {
ui.Error("Warning: use_proxy is false, but instance does" +
" not have an IP address to give to Ansible. Falling back" +
" to use localhost proxy.")
p.config.UseProxy = config.TriTrue
}
connType := generatedData["ConnType"]
if connType != "ssh" && connType != "winrm" {
ui.Error("Warning: use_proxy is false, but communicator is " +
"neither ssh nor winrm, so without the proxy ansible will not" +
" function. Falling back to localhost proxy.")
p.config.UseProxy = config.TriTrue
}
}
privKeyFile := ""
if !p.config.UseProxy.False() {
// We set up the proxy if useProxy is either true or unset.
pkf, err := p.setupAdapterFunc(ui, comm)
if err != nil {
return err
}
// This is necessary to avoid accidentally redeclaring
// privKeyFile in the scope of this if statement.
privKeyFile = pkf
defer func() {
log.Print("shutting down the SSH proxy")
close(p.done)
p.adapter.Shutdown()
}()
go p.adapter.Serve()
// Remove the private key file
if len(privKeyFile) > 0 {
defer os.Remove(privKeyFile)
}
} else {
connType := generatedData["ConnType"].(string)
switch connType {
case "ssh":
ui.Message("Not using Proxy adapter for Ansible run:\n" +
"\tUsing ssh keys from Packer communicator...")
// In this situation, we need to make sure we have the
// private key we actually use to access the instance.
SSHPrivateKeyFile := generatedData["SSHPrivateKeyFile"].(string)
if SSHPrivateKeyFile != "" {
privKeyFile = SSHPrivateKeyFile
} else {
// See if we can get a private key and write that to a tmpfile
SSHPrivateKey := generatedData["SSHPrivateKey"].(string)
tmpSSHPrivateKey, err := tmp.File("ansible-key")
if err != nil {
return fmt.Errorf("Error writing private key to temp file for"+
"ansible connection: %v", err)
}
_, err = tmpSSHPrivateKey.WriteString(SSHPrivateKey)
if err != nil {
return errors.New("failed to write private key to temp file")
}
err = tmpSSHPrivateKey.Close()
if err != nil {
return errors.New("failed to close private key temp file")
}
privKeyFile = tmpSSHPrivateKey.Name()
}
// Also make sure that the username matches the SSH keys given.
if p.config.userWasEmpty {
p.config.User = generatedData["User"].(string)
}
case "winrm":
ui.Message("Not using Proxy adapter for Ansible run:\n" +
"\tUsing WinRM Password from Packer communicator...")
}
}
if len(p.config.InventoryFile) == 0 {
// Create the inventory file
err := p.createInventoryFile()
if err != nil {
return err
}
if !p.config.KeepInventoryFile {
// Delete the generated inventory file
defer func() {
os.Remove(p.config.InventoryFile)
p.config.InventoryFile = ""
}()
}
}
if err := p.executeAnsibleFunc(ui, comm, privKeyFile); err != nil {
return fmt.Errorf("Error executing Ansible: %s", err)
}
@ -417,11 +547,52 @@ func (p *Provisioner) executeGalaxy(ui packer.Ui, comm packer.Communicator) erro
return nil
}
// createCmdArgs assembles the argument and environment-variable lists for
// the ansible-playbook invocation. httpAddr, inventory, playbook and
// privKeyFile are passed through into the generated flags; config-driven
// extra arguments and env vars are appended last. The exact ordering of
// args is part of the contract exercised by TestCreateCmdArgs.
func (p *Provisioner) createCmdArgs(httpAddr, inventory, playbook, privKeyFile string) (args []string, envVars []string) {
	args = []string{}

	if p.config.PackerBuildName != "" {
		// HCL configs don't currently have the PackerBuildName. Don't
		// cause weirdness with a half-set variable
		args = append(args, "-e", fmt.Sprintf("packer_build_name=%s", p.config.PackerBuildName))
	}

	args = append(args, "-e", fmt.Sprintf("packer_builder_type=%s", p.config.PackerBuilderType))

	if len(privKeyFile) > 0 {
		// "-e ansible_ssh_private_key_file" is preferable to "--private-key"
		// because it is a higher priority variable and therefore won't get
		// overridden by dynamic variables. See #5852 for more details.
		args = append(args, "-e", fmt.Sprintf("ansible_ssh_private_key_file=%s", privKeyFile))
	}

	// expose packer_http_addr extra variable
	if httpAddr != "" {
		args = append(args, "-e", fmt.Sprintf("packer_http_addr=%s", httpAddr))
	}

	// Add password to ansible call.
	if p.config.UseProxy.False() && p.generatedData["ConnType"] == "winrm" {
		args = append(args, "-e", fmt.Sprintf("ansible_password=%s", p.generatedData["Password"]))
	}

	if p.generatedData["ConnType"] == "ssh" {
		// Add ssh extra args to set IdentitiesOnly
		args = append(args, "--ssh-extra-args", "-o IdentitiesOnly=yes")
	}

	args = append(args, "-i", inventory, playbook)

	args = append(args, p.config.ExtraArguments...)
	if len(p.config.AnsibleEnvVars) > 0 {
		envVars = append(envVars, p.config.AnsibleEnvVars...)
	}
	return args, envVars
}
func (p *Provisioner) executeAnsible(ui packer.Ui, comm packer.Communicator, privKeyFile string) error {
playbook, _ := filepath.Abs(p.config.PlaybookFile)
inventory := p.config.InventoryFile
var envvars []string
httpAddr := common.GetHTTPAddr()
// Fetch external dependencies
if len(p.config.GalaxyFile) > 0 {
@ -429,27 +600,8 @@ func (p *Provisioner) executeAnsible(ui packer.Ui, comm packer.Communicator, pri
return fmt.Errorf("Error executing Ansible Galaxy: %s", err)
}
}
args := []string{"--extra-vars", fmt.Sprintf("packer_build_name=%s packer_builder_type=%s -o IdentitiesOnly=yes",
p.config.PackerBuildName, p.config.PackerBuilderType),
"-i", inventory, playbook}
if len(privKeyFile) > 0 {
// Changed this from using --private-key to supplying -e ansible_ssh_private_key_file as the latter
// is treated as a highest priority variable, and thus prevents overriding by dynamic variables
// as seen in #5852
// args = append(args, "--private-key", privKeyFile)
args = append(args, "-e", fmt.Sprintf("ansible_ssh_private_key_file=%s", privKeyFile))
}
// expose packer_http_addr extra variable
httpAddr := common.GetHTTPAddr()
if httpAddr != "" {
args = append(args, "--extra-vars", fmt.Sprintf("packer_http_addr=%s", httpAddr))
}
args = append(args, p.config.ExtraArguments...)
if len(p.config.AnsibleEnvVars) > 0 {
envvars = append(envvars, p.config.AnsibleEnvVars...)
}
args, envvars := p.createCmdArgs(httpAddr, inventory, playbook, privKeyFile)
cmd := exec.Command(p.config.Command, args...)

View File

@ -32,10 +32,12 @@ type FlatConfig struct {
UseSFTP *bool `mapstructure:"use_sftp" cty:"use_sftp"`
InventoryDirectory *string `mapstructure:"inventory_directory" cty:"inventory_directory"`
InventoryFile *string `mapstructure:"inventory_file" cty:"inventory_file"`
KeepInventoryFile *bool `mapstructure:"keep_inventory_file" cty:"keep_inventory_file"`
GalaxyFile *string `mapstructure:"galaxy_file" cty:"galaxy_file"`
GalaxyCommand *string `mapstructure:"galaxy_command" cty:"galaxy_command"`
GalaxyForceInstall *bool `mapstructure:"galaxy_force_install" cty:"galaxy_force_install"`
RolesPath *string `mapstructure:"roles_path" cty:"roles_path"`
UseProxy *bool `mapstructure:"use_proxy" cty:"use_proxy"`
}
// FlatMapstructure returns a new FlatConfig.
@ -73,10 +75,12 @@ func (*FlatConfig) HCL2Spec() map[string]hcldec.Spec {
"use_sftp": &hcldec.AttrSpec{Name: "use_sftp", Type: cty.Bool, Required: false},
"inventory_directory": &hcldec.AttrSpec{Name: "inventory_directory", Type: cty.String, Required: false},
"inventory_file": &hcldec.AttrSpec{Name: "inventory_file", Type: cty.String, Required: false},
"keep_inventory_file": &hcldec.AttrSpec{Name: "keep_inventory_file", Type: cty.Bool, Required: false},
"galaxy_file": &hcldec.AttrSpec{Name: "galaxy_file", Type: cty.String, Required: false},
"galaxy_command": &hcldec.AttrSpec{Name: "galaxy_command", Type: cty.String, Required: false},
"galaxy_force_install": &hcldec.AttrSpec{Name: "galaxy_force_install", Type: cty.Bool, Required: false},
"roles_path": &hcldec.AttrSpec{Name: "roles_path", Type: cty.String, Required: false},
"use_proxy": &hcldec.AttrSpec{Name: "use_proxy", Type: cty.Bool, Required: false},
}
return s
}

View File

@ -14,7 +14,9 @@ import (
"strings"
"testing"
confighelper "github.com/hashicorp/packer/helper/config"
"github.com/hashicorp/packer/packer"
"github.com/stretchr/testify/assert"
)
// Be sure to remove the Ansible stub file in each test with:
@ -354,3 +356,335 @@ func TestAnsibleLongMessages(t *testing.T) {
t.Fatalf("err: %s", err)
}
}
// TestCreateInventoryFile runs createInventoryFile across combinations of
// Ansible major version, groups/empty groups, and use_proxy settings, and
// compares the written inventory file byte-for-byte against the expected
// contents.
func TestCreateInventoryFile(t *testing.T) {
	type inventoryFileTestCases struct {
		AnsibleVersion uint
		User           string
		Groups         []string
		EmptyGroups    []string
		UseProxy       confighelper.Trilean
		GeneratedData  map[string]interface{}
		Expected       string
	}

	TestCases := []inventoryFileTestCases{
		{
			AnsibleVersion: 1,
			User:           "testuser",
			UseProxy:       confighelper.TriFalse,
			GeneratedData:  basicGenData(nil),
			Expected:       "default ansible_ssh_host=123.45.67.89 ansible_ssh_user=testuser ansible_ssh_port=1234\n",
		},
		{
			AnsibleVersion: 2,
			User:           "testuser",
			UseProxy:       confighelper.TriFalse,
			GeneratedData:  basicGenData(nil),
			Expected:       "default ansible_host=123.45.67.89 ansible_user=testuser ansible_port=1234\n",
		},
		{
			AnsibleVersion: 1,
			User:           "testuser",
			Groups:         []string{"Group1", "Group2"},
			UseProxy:       confighelper.TriFalse,
			GeneratedData:  basicGenData(nil),
			Expected: `default ansible_ssh_host=123.45.67.89 ansible_ssh_user=testuser ansible_ssh_port=1234
[Group1]
default ansible_ssh_host=123.45.67.89 ansible_ssh_user=testuser ansible_ssh_port=1234
[Group2]
default ansible_ssh_host=123.45.67.89 ansible_ssh_user=testuser ansible_ssh_port=1234
`,
		},
		{
			AnsibleVersion: 1,
			User:           "testuser",
			EmptyGroups:    []string{"Group1", "Group2"},
			UseProxy:       confighelper.TriFalse,
			GeneratedData:  basicGenData(nil),
			Expected: `default ansible_ssh_host=123.45.67.89 ansible_ssh_user=testuser ansible_ssh_port=1234
[Group1]
[Group2]
`,
		},
		{
			AnsibleVersion: 1,
			User:           "testuser",
			Groups:         []string{"Group1", "Group2"},
			EmptyGroups:    []string{"Group3"},
			UseProxy:       confighelper.TriFalse,
			GeneratedData:  basicGenData(nil),
			Expected: `default ansible_ssh_host=123.45.67.89 ansible_ssh_user=testuser ansible_ssh_port=1234
[Group1]
default ansible_ssh_host=123.45.67.89 ansible_ssh_user=testuser ansible_ssh_port=1234
[Group2]
default ansible_ssh_host=123.45.67.89 ansible_ssh_user=testuser ansible_ssh_port=1234
[Group3]
`,
		},
		{
			// With use_proxy false and a winrm communicator, the WinRM
			// inventory template is used.
			AnsibleVersion: 2,
			User:           "testuser",
			UseProxy:       confighelper.TriFalse,
			GeneratedData: basicGenData(map[string]interface{}{
				"ConnType": "winrm",
				"Password": "12345",
			}),
			Expected: "default ansible_host=123.45.67.89 ansible_connection=winrm ansible_winrm_transport=basic ansible_shell_type=powershell ansible_user=testuser ansible_port=1234\n",
		},
	}

	for _, tc := range TestCases {
		var p Provisioner
		p.Prepare(testConfig(t))
		// NOTE(review): these defers run at function exit, not per
		// iteration, so stub commands/inventory files accumulate until the
		// whole test returns.
		defer os.Remove(p.config.Command)
		p.ansibleMajVersion = tc.AnsibleVersion
		p.config.User = tc.User
		p.config.Groups = tc.Groups
		p.config.EmptyGroups = tc.EmptyGroups
		p.config.UseProxy = tc.UseProxy
		p.generatedData = tc.GeneratedData

		err := p.createInventoryFile()
		if err != nil {
			t.Fatalf("error creating config using localhost and local port proxy")
		}
		if p.config.InventoryFile == "" {
			t.Fatalf("No inventory file was created")
		}
		defer os.Remove(p.config.InventoryFile)
		f, err := ioutil.ReadFile(p.config.InventoryFile)
		if err != nil {
			t.Fatalf("couldn't read created inventoryfile: %s", err)
		}

		expected := tc.Expected
		if fmt.Sprintf("%s", f) != expected {
			t.Fatalf("File didn't match expected:\n\n expected: \n%s\n; recieved: \n%s\n", expected, f)
		}
	}
}
// basicGenData returns a generated-data map populated with baseline SSH
// connection values, then overlays any entries from input on top of those
// defaults.
func basicGenData(input map[string]interface{}) map[string]interface{} {
	defaults := map[string]interface{}{
		"Host":              "123.45.67.89",
		"Port":              int64(1234),
		"ConnType":          "ssh",
		"SSHPrivateKeyFile": "",
		"SSHPrivateKey":     "asdf",
		"User":              "PartyPacker",
	}
	// Ranging over a nil map is a no-op, so overrides apply only when the
	// caller supplied them.
	for key, override := range input {
		defaults[key] = override
	}
	return defaults
}
// TestCreateCmdArgs exercises createCmdArgs across SSH, WinRM, docker, and
// proxy/proxyless configurations, asserting on both the generated
// ansible-playbook arguments and the forwarded environment variables.
func TestCreateCmdArgs(t *testing.T) {
	type testcase struct {
		PackerBuildName   string
		PackerBuilderType string
		UseProxy          confighelper.Trilean
		generatedData     map[string]interface{}
		ExtraArguments    []string
		AnsibleEnvVars    []string
		callArgs          []string // httpAddr inventory playbook privKeyFile
		ExpectedArgs      []string
		ExpectedEnvVars   []string
	}
	TestCases := []testcase{
		{
			// SSH with private key and an extra argument.
			PackerBuildName: "packerparty",
			generatedData:   basicGenData(nil),
			ExtraArguments:  []string{"-e", "hello-world"},
			AnsibleEnvVars:  []string{"ENV_1=pancakes", "ENV_2=bananas"},
			callArgs:        []string{"", "/var/inventory", "test-playbook.yml", "/path/to/privkey.pem"},
			ExpectedArgs:    []string{"-e", "packer_build_name=packerparty", "-e", "packer_builder_type=fakebuilder", "-e", "ansible_ssh_private_key_file=/path/to/privkey.pem", "--ssh-extra-args", "-o IdentitiesOnly=yes", "-i", "/var/inventory", "test-playbook.yml", "-e", "hello-world"},
			ExpectedEnvVars: []string{"ENV_1=pancakes", "ENV_2=bananas"},
		},
		{
			// Explicit use_proxy true behaves like the default SSH case.
			PackerBuildName: "packerparty",
			UseProxy:        confighelper.TriTrue,
			generatedData:   basicGenData(nil),
			ExtraArguments:  []string{"-e", "hello-world"},
			callArgs:        []string{"", "/var/inventory", "test-playbook.yml", "/path/to/privkey.pem"},
			ExpectedArgs:    []string{"-e", "packer_build_name=packerparty", "-e", "packer_builder_type=fakebuilder", "-e", "ansible_ssh_private_key_file=/path/to/privkey.pem", "--ssh-extra-args", "-o IdentitiesOnly=yes", "-i", "/var/inventory", "test-playbook.yml", "-e", "hello-world"},
			ExpectedEnvVars: []string{},
		},
		{
			// Winrm, but use_proxy is unset so we don't do anything with ansible_password.
			PackerBuildName: "packerparty",
			generatedData: basicGenData(map[string]interface{}{
				"ConnType": "winrm",
			}),
			ExtraArguments:  []string{"-e", "hello-world"},
			AnsibleEnvVars:  []string{"ENV_1=pancakes", "ENV_2=bananas"},
			callArgs:        []string{"", "/var/inventory", "test-playbook.yml", ""},
			ExpectedArgs:    []string{"-e", "packer_build_name=packerparty", "-e", "packer_builder_type=fakebuilder", "-i", "/var/inventory", "test-playbook.yml", "-e", "hello-world"},
			ExpectedEnvVars: []string{"ENV_1=pancakes", "ENV_2=bananas"},
		},
		{
			// HTTPAddr should be set. No env vars.
			PackerBuildName: "packerparty",
			ExtraArguments:  []string{"-e", "hello-world"},
			generatedData:   basicGenData(nil),
			callArgs:        []string{"123.45.67.89", "/var/inventory", "test-playbook.yml", ""},
			ExpectedArgs:    []string{"-e", "packer_build_name=packerparty", "-e", "packer_builder_type=fakebuilder", "-e", "packer_http_addr=123.45.67.89", "--ssh-extra-args", "-o IdentitiesOnly=yes", "-i", "/var/inventory", "test-playbook.yml", "-e", "hello-world"},
			ExpectedEnvVars: []string{},
		},
		{
			// Add ansible_password for proxyless winrm connection.
			UseProxy: confighelper.TriFalse,
			generatedData: basicGenData(map[string]interface{}{
				"ConnType": "winrm",
				"Password": "ilovebananapancakes",
			}),
			AnsibleEnvVars:  []string{"ENV_1=pancakes", "ENV_2=bananas"},
			callArgs:        []string{"123.45.67.89", "/var/inventory", "test-playbook.yml", ""},
			ExpectedArgs:    []string{"-e", "packer_builder_type=fakebuilder", "-e", "packer_http_addr=123.45.67.89", "-e", "ansible_password=ilovebananapancakes", "-i", "/var/inventory", "test-playbook.yml"},
			ExpectedEnvVars: []string{"ENV_1=pancakes", "ENV_2=bananas"},
		},
		{
			// Neither special ssh stuff, nor special windows stuff. This is docker!
			PackerBuildName: "packerparty",
			generatedData: basicGenData(map[string]interface{}{
				"ConnType": "docker",
			}),
			ExtraArguments:  []string{"-e", "hello-world"},
			AnsibleEnvVars:  []string{"ENV_1=pancakes", "ENV_2=bananas"},
			callArgs:        []string{"", "/var/inventory", "test-playbook.yml", ""},
			ExpectedArgs:    []string{"-e", "packer_build_name=packerparty", "-e", "packer_builder_type=fakebuilder", "-i", "/var/inventory", "test-playbook.yml", "-e", "hello-world"},
			ExpectedEnvVars: []string{"ENV_1=pancakes", "ENV_2=bananas"},
		},
		{
			// Windows, no proxy, with extra vars.
			UseProxy: confighelper.TriFalse,
			generatedData: basicGenData(map[string]interface{}{
				"ConnType": "winrm",
				"Password": "ilovebananapancakes",
			}),
			ExtraArguments:  []string{"-e", "hello-world"},
			AnsibleEnvVars:  []string{"ENV_1=pancakes", "ENV_2=bananas"},
			callArgs:        []string{"123.45.67.89", "/var/inventory", "test-playbook.yml", ""},
			ExpectedArgs:    []string{"-e", "packer_builder_type=fakebuilder", "-e", "packer_http_addr=123.45.67.89", "-e", "ansible_password=ilovebananapancakes", "-i", "/var/inventory", "test-playbook.yml", "-e", "hello-world"},
			ExpectedEnvVars: []string{"ENV_1=pancakes", "ENV_2=bananas"},
		},
		{
			// No builder name. This shouldn't cause an error, it just shouldn't be set. HCL, yo.
			generatedData:   basicGenData(nil),
			callArgs:        []string{"", "/var/inventory", "test-playbook.yml", ""},
			ExpectedArgs:    []string{"-e", "packer_builder_type=fakebuilder", "--ssh-extra-args", "-o IdentitiesOnly=yes", "-i", "/var/inventory", "test-playbook.yml"},
			ExpectedEnvVars: []string{},
		},
	}

	for _, tc := range TestCases {
		var p Provisioner
		p.Prepare(testConfig(t))
		defer os.Remove(p.config.Command)
		p.config.UseProxy = tc.UseProxy
		p.config.PackerBuilderType = "fakebuilder"
		p.config.PackerBuildName = tc.PackerBuildName
		p.generatedData = tc.generatedData
		p.config.ExtraArguments = tc.ExtraArguments
		p.config.AnsibleEnvVars = tc.AnsibleEnvVars

		args, envVars := p.createCmdArgs(tc.callArgs[0], tc.callArgs[1], tc.callArgs[2], tc.callArgs[3])
		assert.ElementsMatch(t, args, tc.ExpectedArgs,
			"Args didn't match expected:\n\n expected: \n%s\n; recieved: \n%s\n", tc.ExpectedArgs, args)
		assert.ElementsMatch(t, envVars, tc.ExpectedEnvVars, "EnvVars didn't match expected:\n\n expected: \n%s\n; recieved: \n%s\n", tc.ExpectedEnvVars, envVars)
	}
}
// TestUseProxy verifies the Provision-time decision of whether to set up the
// SSH proxy adapter, for each use_proxy setting and communicator type, by
// stubbing the adapter/ansible hooks with a provisionLogicTracker.
func TestUseProxy(t *testing.T) {
	type testcase struct {
		UseProxy                   confighelper.Trilean
		generatedData              map[string]interface{}
		expectedSetupAdapterCalled bool
		explanation                string
	}

	tcs := []testcase{
		{
			explanation:                "use_proxy is true; we should set up adapter",
			UseProxy:                   confighelper.TriTrue,
			generatedData:              basicGenData(nil),
			expectedSetupAdapterCalled: true,
		},
		{
			explanation: "use_proxy is false but no IP addr is available; we should set up adapter anyway.",
			UseProxy:    confighelper.TriFalse,
			generatedData: basicGenData(map[string]interface{}{
				"Host": "",
				"Port": nil,
			}),
			expectedSetupAdapterCalled: true,
		},
		{
			explanation:                "use_proxy is false; we shouldn't set up adapter.",
			UseProxy:                   confighelper.TriFalse,
			generatedData:              basicGenData(nil),
			expectedSetupAdapterCalled: false,
		},
		{
			explanation: "use_proxy is false but connType isn't ssh or winrm.",
			UseProxy:    confighelper.TriFalse,
			generatedData: basicGenData(map[string]interface{}{
				"ConnType": "docker",
			}),
			expectedSetupAdapterCalled: true,
		},
		{
			explanation:                "use_proxy is unset; we should default to setting up the adapter (for now).",
			UseProxy:                   confighelper.TriUnset,
			generatedData:              basicGenData(nil),
			expectedSetupAdapterCalled: true,
		},
		{
			explanation: "use_proxy is false and connType is winRM. we should not set up the adapter.",
			UseProxy:    confighelper.TriFalse,
			generatedData: basicGenData(map[string]interface{}{
				"ConnType": "winrm",
			}),
			expectedSetupAdapterCalled: false,
		},
		{
			explanation: "use_proxy is unset and connType is winRM. we should set up the adapter.",
			UseProxy:    confighelper.TriUnset,
			generatedData: basicGenData(map[string]interface{}{
				"ConnType": "winrm",
			}),
			expectedSetupAdapterCalled: true,
		},
	}

	for _, tc := range tcs {
		var p Provisioner
		p.Prepare(testConfig(t))
		p.config.UseProxy = tc.UseProxy
		// NOTE(review): this defer is redundant with the explicit
		// os.Remove at the end of the loop body, and only fires at
		// function exit.
		defer os.Remove(p.config.Command)
		p.ansibleMajVersion = 1

		// Swap in the tracker so Provision exercises decision logic only.
		var l provisionLogicTracker
		l.setupAdapterCalled = false
		p.setupAdapterFunc = l.setupAdapter
		p.executeAnsibleFunc = l.executeAnsible
		ctx := context.TODO()
		comm := new(packer.MockCommunicator)
		ui := &packer.BasicUi{
			Reader: new(bytes.Buffer),
			Writer: new(bytes.Buffer),
		}
		// NOTE(review): Provision's error is deliberately ignored here; the
		// test asserts only on whether the adapter setup hook was reached.
		p.Provision(ctx, ui, comm, tc.generatedData)
		if l.setupAdapterCalled != tc.expectedSetupAdapterCalled {
			t.Fatalf("%s", tc.explanation)
		}
		os.Remove(p.config.Command)
	}
}

View File

@ -83,7 +83,7 @@ necessary for this build to succeed and can be found further down the page.
### Block Devices Configuration
Block devices can be nested in the
[ami_block_device_mappings](#ami_block_device_mappings) or the
[launch_block_device_mappings](#launch_block_device_mappings) array.
<%= partial "partials/builder/amazon/common/BlockDevice" %>
@ -241,6 +241,100 @@ after termination. If you need to preserve those source volumes, you can
overwrite the termination setting by specifying `delete_on_termination=false`
in the `launch_block_device_mappings` block for the device.
## Connecting to Windows instances using WinRM
If you want to launch a Windows instance and connect using WinRM, you will need
to configure WinRM on that instance. The following is a basic powershell script
that can be supplied to AWS using the "user_data_file" option. It enables
WinRM via HTTPS on port 5986, and creates a self-signed certificate to use to
connect. If you are using a certificate from a CA, rather than creating a
self-signed certificate, you can omit the "winrm_insecure" option mentioned
below.
autogenerated_password_https_boostrap.txt
``` ps1
<powershell>
# MAKE SURE IN YOUR PACKER CONFIG TO SET:
#
#
# "winrm_username": "Administrator",
# "winrm_insecure": true,
# "winrm_use_ssl": true,
#
#
write-output "Running User Data Script"
write-host "(host) Running User Data Script"
Set-ExecutionPolicy Unrestricted -Scope LocalMachine -Force -ErrorAction Ignore
# Don't set this before Set-ExecutionPolicy as it throws an error
$ErrorActionPreference = "stop"
# Remove HTTP listener
Remove-Item -Path WSMan:\Localhost\listener\listener* -Recurse
# Create a self-signed certificate to let ssl work
$Cert = New-SelfSignedCertificate -CertstoreLocation Cert:\LocalMachine\My -DnsName "packer"
New-Item -Path WSMan:\LocalHost\Listener -Transport HTTPS -Address * -CertificateThumbPrint $Cert.Thumbprint -Force
# WinRM
write-output "Setting up WinRM"
write-host "(host) setting up WinRM"
cmd.exe /c winrm quickconfig -q
cmd.exe /c winrm set "winrm/config" '@{MaxTimeoutms="1800000"}'
cmd.exe /c winrm set "winrm/config/winrs" '@{MaxMemoryPerShellMB="1024"}'
cmd.exe /c winrm set "winrm/config/service" '@{AllowUnencrypted="true"}'
cmd.exe /c winrm set "winrm/config/client" '@{AllowUnencrypted="true"}'
cmd.exe /c winrm set "winrm/config/service/auth" '@{Basic="true"}'
cmd.exe /c winrm set "winrm/config/client/auth" '@{Basic="true"}'
cmd.exe /c winrm set "winrm/config/service/auth" '@{CredSSP="true"}'
cmd.exe /c winrm set "winrm/config/listener?Address=*+Transport=HTTPS" "@{Port=`"5986`";Hostname=`"packer`";CertificateThumbprint=`"$($Cert.Thumbprint)`"}"
cmd.exe /c netsh advfirewall firewall set rule group="remote administration" new enable=yes
cmd.exe /c netsh firewall add portopening TCP 5986 "Port 5986"
cmd.exe /c net stop winrm
cmd.exe /c sc config winrm start= auto
cmd.exe /c net start winrm
</powershell>
```
You'll notice that this config does not define a user or password; instead,
Packer will ask AWS to provide a random password that it generates
automatically. The following config will work with the above template:
```
{
"builders": [
{
"type": "amazon-ebs",
"region": "us-east-1",
"instance_type": "t2.micro",
"source_ami_filter": {
"filters": {
"virtualization-type": "hvm",
"name": "*Windows_Server-2012*English-64Bit-Base*",
"root-device-type": "ebs"
},
"most_recent": true,
"owners": "amazon"
},
"ami_name": "default-packer",
"user_data_file": "winrm_bootstrap.txt",
"communicator": "winrm",
"force_deregister": true,
"winrm_insecure": true,
"winrm_username": "Administrator",
"winrm_use_ssl": true
}]
}
```
## Windows 2016 Sysprep Commands - For Amazon Windows AMIs Only
For Amazon Windows 2016 AMIs it is necessary to run Sysprep commands which can

View File

@ -132,14 +132,14 @@ Optional Parameters:
- `groups` (array of strings) - The groups into which the Ansible host should
be placed. When unspecified, the host is not associated with any groups.
- `inventory_file` (string) - The inventory file to use during provisioning.
When unspecified, Packer will create a temporary inventory file and will
use the `host_alias`.
- `host_alias` (string) - The alias by which the Ansible host should be
known. Defaults to `default`. This setting is ignored when using a custom
inventory file.
- `inventory_directory` (string) - The directory in which to place the
temporary generated Ansible inventory file. By default, this is the
system-specific temporary file location. The fully-qualified name of this
@ -148,6 +148,12 @@ Optional Parameters:
inventory directory with `host_vars` `group_vars` that you would like to
use in the playbook that this provisioner will run.
- `keep_inventory_file` (boolean) - If `true`, the Ansible provisioner will
not delete the temporary inventory file it creates in order to connect to
the instance. This is useful if you are trying to debug your Ansible run
and are using `--on-error=ask` in order to leave your instance running while
you test your playbook. This option is not used if you set an `inventory_file`.
- `local_port` (uint) - The port on which to attempt to listen for SSH
connections. This value is a starting point. The provisioner will attempt
listen for SSH connections on the first available of ten ports, starting at
@ -181,7 +187,29 @@ Optional Parameters:
`ansible-playbook` with the `-e ansible_ssh_private_key_file` option.
- `user` (string) - The `ansible_user` to use. Defaults to the user running
Packer, NOT the user set for your communicator. If you want to use the same
user as the communicator, you will need to manually set it again in this
field.
- `use_proxy` (boolean) - When `true`, set up a localhost proxy adapter
so that Ansible has an IP address to connect to, even if your guest does not
have an IP address. For example, the adapter is necessary for Docker builds
to use the Ansible provisioner. If you set this option to `false`, but
Packer cannot find an IP address to connect Ansible to, it will
automatically set up the adapter anyway.
In order for Ansible to connect properly even when `use_proxy` is `false`, you
need to make sure that you are either providing a valid username and SSH key
to the Ansible provisioner directly, or that the username and SSH key
being used by the SSH communicator will work for your needs. If you do not
provide a user to Ansible, it will use the user associated with your
builder, not the user running Packer.
`use_proxy=false` is currently only supported for SSH and WinRM.
Currently, this defaults to `true` for all connection types. In the future,
this option will be changed to default to `false` for SSH and WinRM
connections where the provisioner has access to a host IP.
<%= partial "partials/provisioners/common-config" %>
@ -256,13 +284,87 @@ connection to chroot and running Ansible as root/sudo.
}
```
### WinRM Communicator
There are two possible methods for using ansible with the WinRM communicator.
#### Method 1 (recommended)
The recommended way to use the WinRM communicator is to set `"use_proxy": false`
and let the Ansible provisioner handle the rest for you. If you
are using WinRM with HTTPS, and you are using a self-signed certificate you
will also have to set `ansible_winrm_server_cert_validation=ignore` in your
extra_arguments.
Below is a fully functioning Ansible example using WinRM:
```
{
"builders": [
{
"type": "amazon-ebs",
"region": "us-east-1",
"instance_type": "t2.micro",
"source_ami_filter": {
"filters": {
"virtualization-type": "hvm",
"name": "*Windows_Server-2012*English-64Bit-Base*",
"root-device-type": "ebs"
},
"most_recent": true,
"owners": "amazon"
},
"ami_name": "default-packer",
"user_data_file": "windows_bootstrap.txt",
"communicator": "winrm",
"force_deregister": true,
"winrm_insecure": true,
"winrm_username": "Administrator",
"winrm_use_ssl": true
}],
"provisioners": [
{
"type": "ansible",
"playbook_file": "./playbook.yml",
"user": "Administrator",
"use_proxy": false,
"extra_arguments": [
"-e", "ansible_winrm_server_cert_validation=ignore"
]
}
]
}
```
Note that you do have to set the "Administrator" user, because otherwise Ansible
will default to using the user that is calling Packer, rather than the user
configured inside of the Packer communicator. For the contents of
windows_bootstrap.txt, see the winrm docs for the amazon-ebs communicator.
When running on macOS, you may see an error like:
```
amazon-ebs: objc[9752]: +[__NSCFConstantString initialize] may have been in progress in another thread when fork() was called.
amazon-ebs: objc[9752]: +[__NSCFConstantString initialize] may have been in progress in another thread when fork() was called. We cannot safely call it or ignore it in the fork() child process. Crashing instead. Set a breakpoint on objc_initializeAfterForkError to debug.
amazon-ebs: ERROR! A worker was found in a dead state
```
If you see this, you may be able to work around the issue by telling Ansible to
explicitly not use any proxying; you can do this by setting the template option
```
"ansible_env_vars": ["no_proxy=\"*\""],
```
in the above Ansible template.
#### Method 2 (Not recommended)
If you want to use the Packer ssh proxy, then you need a custom Ansible
connection plugin and a particular configuration. You need a directory named
`connection_plugins` next to the playbook which contains a file named
`packer.py` which implements the connection plugin. On versions of Ansible
before 2.4.x, the following works as the connection plugin:
``` python
from __future__ import (absolute_import, division, print_function)
@ -345,7 +447,7 @@ Platform:
"builders": [
{
"type": "googlecompute",
"account_file": "{{user `account_file`}}",
"account_file": "{{ user `account_file`}}",
"project_id": "{{user `project_id`}}",
"source_image": "windows-server-2012-r2-dc-v20160916",
"communicator": "winrm",
@ -473,3 +575,11 @@ Example playbook:
yum:
name: httpd
```
### Troubleshooting
If you are using an Ansible version >= 2.8 and Packer hangs in the
"Gathering Facts" stage, this could be the result of a pipelining issue with
the proxy adapter that Packer uses. Setting `use_proxy: false,` in your
Packer config should resolve the issue. In the future we will default to
setting this, so you won't have to, but for now it is a manual change you
must make.