Add a googlecompute-export post-processor. The googlecompute builder now passes a few configuration values (account file path, project ID, build zone, image name, and image size) to its resulting artifact through the artifact State interface, so the googlecompute-export post-processor can reuse them.
Also add documentation and rework how the temporary export VM authenticates.
commit 55b5a45ae4 (parent 2a5ca026f1)
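For orientation before the diff: the handoff works through Packer's artifact `State` interface — the builder stores values on its artifact by key, and the post-processor reads them back. Below is a minimal sketch of the consuming side; the `readBuilderState` helper is illustrative only (not part of this commit), but the key names are the ones the diff introduces, and the actual post-processor does the same lookups inline.

```go
package googlecomputeexport

import "github.com/mitchellh/packer/packer"

// readBuilderState pulls the values the googlecompute builder exposes via
// its artifact's State interface. Hypothetical helper for illustration --
// the post-processor added in this commit performs these lookups inline.
func readBuilderState(artifact packer.Artifact) (imageName, accountPath, projectId, zone string, imageSizeGb int64) {
	imageName = artifact.State("ImageName").(string)
	imageSizeGb = artifact.State("ImageSizeGb").(int64)
	accountPath = artifact.State("AccountFilePath").(string)
	projectId = artifact.State("ProjectId").(string)
	zone = artifact.State("BuildZone").(string)
	return
}
```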
@@ -9,7 +9,7 @@ import (
 )
 
 // accountFile represents the structure of the account file JSON file.
-type accountFile struct {
+type AccountFile struct {
 	PrivateKeyId string `json:"private_key_id"`
 	PrivateKey   string `json:"private_key"`
 	ClientEmail  string `json:"client_email"`
@@ -22,7 +22,7 @@ func parseJSON(result interface{}, text string) error {
 	return dec.Decode(result)
 }
 
-func processAccountFile(account_file *accountFile, text string) error {
+func ProcessAccountFile(account_file *AccountFile, text string) error {
 	// Assume text is a JSON string
 	if err := parseJSON(account_file, text); err != nil {
 		// If text was not JSON, assume it is a file path instead
@@ -7,8 +7,9 @@ import (
 
 // Artifact represents a GCE image as the result of a Packer build.
 type Artifact struct {
 	image  Image
 	driver Driver
+	config *Config
 }
 
 // BuilderId returns the builder Id.
@@ -39,5 +40,17 @@ func (a *Artifact) String() string {
 }
 
+func (a *Artifact) State(name string) interface{} {
+	switch name {
+	case "ImageName":
+		return a.image.Name
+	case "ImageSizeGb":
+		return a.image.SizeGb
+	case "AccountFilePath":
+		return a.config.AccountFile
+	case "ProjectId":
+		return a.config.ProjectId
+	case "BuildZone":
+		return a.config.Zone
+	}
+	return nil
+}
@@ -36,7 +36,7 @@ func (b *Builder) Prepare(raws ...interface{}) ([]string, error) {
 // representing a GCE machine image.
 func (b *Builder) Run(ui packer.Ui, hook packer.Hook, cache packer.Cache) (packer.Artifact, error) {
 	driver, err := NewDriverGCE(
-		ui, b.config.ProjectId, &b.config.account)
+		ui, b.config.ProjectId, &b.config.Account)
 	if err != nil {
 		return nil, err
 	}
@@ -95,6 +95,7 @@ func (b *Builder) Run(ui packer.Ui, hook packer.Hook, cache packer.Cache) (packe
 	artifact := &Artifact{
 		image:  state.Get("image").(Image),
 		driver: driver,
+		config: b.config,
 	}
 	return artifact, nil
 }
@@ -49,7 +49,7 @@ type Config struct {
 	UseInternalIP bool   `mapstructure:"use_internal_ip"`
 	Zone          string `mapstructure:"zone"`
 
-	account         accountFile
+	Account         AccountFile
 	privateKeyBytes []byte
 	stateTimeout    time.Duration
 	ctx             interpolate.Context
@@ -157,15 +157,13 @@ func NewConfig(raws ...interface{}) (*Config, []string, error) {
 		c.Region = region
 	}
 
-	stateTimeout, err := time.ParseDuration(c.RawStateTimeout)
+	err = c.CalcTimeout()
 	if err != nil {
-		errs = packer.MultiErrorAppend(
-			errs, fmt.Errorf("Failed parsing state_timeout: %s", err))
+		errs = packer.MultiErrorAppend(errs, err)
 	}
-	c.stateTimeout = stateTimeout
 
 	if c.AccountFile != "" {
-		if err := processAccountFile(&c.account, c.AccountFile); err != nil {
+		if err := ProcessAccountFile(&c.Account, c.AccountFile); err != nil {
 			errs = packer.MultiErrorAppend(errs, err)
 		}
 	}
@@ -185,3 +183,12 @@ func NewConfig(raws ...interface{}) (*Config, []string, error) {
 
 	return c, nil, nil
 }
+
+func (c *Config) CalcTimeout() error {
+	stateTimeout, err := time.ParseDuration(c.RawStateTimeout)
+	if err != nil {
+		return fmt.Errorf("Failed parsing state_timeout: %s", err)
+	}
+	c.stateTimeout = stateTimeout
+	return nil
+}
@@ -26,7 +26,7 @@ type driverGCE struct {
 
 var DriverScopes = []string{"https://www.googleapis.com/auth/compute", "https://www.googleapis.com/auth/devstorage.full_control"}
 
-func NewDriverGCE(ui packer.Ui, p string, a *accountFile) (Driver, error) {
+func NewDriverGCE(ui packer.Ui, p string, a *AccountFile) (Driver, error) {
 	var err error
 
 	var client *http.Client
@@ -80,7 +80,7 @@ func (s *StepCreateInstance) Run(state multistep.StateBag) multistep.StepAction
 		OmitExternalIP:      config.OmitExternalIP,
 		Preemptible:         config.Preemptible,
 		Region:              config.Region,
-		ServiceAccountEmail: config.account.ClientEmail,
+		ServiceAccountEmail: config.Account.ClientEmail,
 		Subnetwork:          config.Subnetwork,
 		Tags:                config.Tags,
 		Zone:                config.Zone,
@@ -71,8 +71,9 @@ func (s *StepTeardownInstance) Cleanup(state multistep.StateBag) {
 	if err != nil {
 		ui.Error(fmt.Sprintf(
 			"Error deleting disk. Please delete it manually.\n\n"+
-				"Name: %s\n"+
-				"Error: %s", config.InstanceName, err))
+				"DiskName: %s\n"+
+				"Zone: %s\n"+
+				"Error: %s", config.DiskName, config.Zone, err))
 	}
 
 	ui.Message("Disk has been deleted!")
@@ -1,189 +0,0 @@
-//
-// This file is automatically generated by scripts/generate-plugins.go -- Do not edit!
-//
-
-package command
-
-import (
-	"fmt"
-	"log"
-	"regexp"
-	"strings"
-
-	"github.com/mitchellh/packer/packer"
-	"github.com/mitchellh/packer/packer/plugin"
-
-	amazonchrootbuilder "github.com/mitchellh/packer/builder/amazon/chroot"
-	amazonebsbuilder "github.com/mitchellh/packer/builder/amazon/ebs"
-	amazoninstancebuilder "github.com/mitchellh/packer/builder/amazon/instance"
-	azurearmbuilder "github.com/mitchellh/packer/builder/azure/arm"
-	digitaloceanbuilder "github.com/mitchellh/packer/builder/digitalocean"
-	dockerbuilder "github.com/mitchellh/packer/builder/docker"
-	filebuilder "github.com/mitchellh/packer/builder/file"
-	googlecomputebuilder "github.com/mitchellh/packer/builder/googlecompute"
-	nullbuilder "github.com/mitchellh/packer/builder/null"
-	openstackbuilder "github.com/mitchellh/packer/builder/openstack"
-	parallelsisobuilder "github.com/mitchellh/packer/builder/parallels/iso"
-	parallelspvmbuilder "github.com/mitchellh/packer/builder/parallels/pvm"
-	qemubuilder "github.com/mitchellh/packer/builder/qemu"
-	virtualboxisobuilder "github.com/mitchellh/packer/builder/virtualbox/iso"
-	virtualboxovfbuilder "github.com/mitchellh/packer/builder/virtualbox/ovf"
-	vmwareisobuilder "github.com/mitchellh/packer/builder/vmware/iso"
-	vmwarevmxbuilder "github.com/mitchellh/packer/builder/vmware/vmx"
-	amazonimportpostprocessor "github.com/mitchellh/packer/post-processor/amazon-import"
-	artificepostprocessor "github.com/mitchellh/packer/post-processor/artifice"
-	atlaspostprocessor "github.com/mitchellh/packer/post-processor/atlas"
-	checksumpostprocessor "github.com/mitchellh/packer/post-processor/checksum"
-	compresspostprocessor "github.com/mitchellh/packer/post-processor/compress"
-	dockerimportpostprocessor "github.com/mitchellh/packer/post-processor/docker-import"
-	dockerpushpostprocessor "github.com/mitchellh/packer/post-processor/docker-push"
-	dockersavepostprocessor "github.com/mitchellh/packer/post-processor/docker-save"
-	dockertagpostprocessor "github.com/mitchellh/packer/post-processor/docker-tag"
-	manifestpostprocessor "github.com/mitchellh/packer/post-processor/manifest"
-	shelllocalpostprocessor "github.com/mitchellh/packer/post-processor/shell-local"
-	vagrantpostprocessor "github.com/mitchellh/packer/post-processor/vagrant"
-	vagrantcloudpostprocessor "github.com/mitchellh/packer/post-processor/vagrant-cloud"
-	vspherepostprocessor "github.com/mitchellh/packer/post-processor/vsphere"
-	ansibleprovisioner "github.com/mitchellh/packer/provisioner/ansible"
-	ansiblelocalprovisioner "github.com/mitchellh/packer/provisioner/ansible-local"
-	chefclientprovisioner "github.com/mitchellh/packer/provisioner/chef-client"
-	chefsoloprovisioner "github.com/mitchellh/packer/provisioner/chef-solo"
-	fileprovisioner "github.com/mitchellh/packer/provisioner/file"
-	powershellprovisioner "github.com/mitchellh/packer/provisioner/powershell"
-	puppetmasterlessprovisioner "github.com/mitchellh/packer/provisioner/puppet-masterless"
-	puppetserverprovisioner "github.com/mitchellh/packer/provisioner/puppet-server"
-	saltmasterlessprovisioner "github.com/mitchellh/packer/provisioner/salt-masterless"
-	shellprovisioner "github.com/mitchellh/packer/provisioner/shell"
-	shelllocalprovisioner "github.com/mitchellh/packer/provisioner/shell-local"
-	windowsrestartprovisioner "github.com/mitchellh/packer/provisioner/windows-restart"
-	windowsshellprovisioner "github.com/mitchellh/packer/provisioner/windows-shell"
-)
-
-type PluginCommand struct {
-	Meta
-}
-
-var Builders = map[string]packer.Builder{
-	"amazon-chroot":   new(amazonchrootbuilder.Builder),
-	"amazon-ebs":      new(amazonebsbuilder.Builder),
-	"amazon-instance": new(amazoninstancebuilder.Builder),
-	"azure-arm":       new(azurearmbuilder.Builder),
-	"digitalocean":    new(digitaloceanbuilder.Builder),
-	"docker":          new(dockerbuilder.Builder),
-	"file":            new(filebuilder.Builder),
-	"googlecompute":   new(googlecomputebuilder.Builder),
-	"null":            new(nullbuilder.Builder),
-	"openstack":       new(openstackbuilder.Builder),
-	"parallels-iso":   new(parallelsisobuilder.Builder),
-	"parallels-pvm":   new(parallelspvmbuilder.Builder),
-	"qemu":            new(qemubuilder.Builder),
-	"virtualbox-iso":  new(virtualboxisobuilder.Builder),
-	"virtualbox-ovf":  new(virtualboxovfbuilder.Builder),
-	"vmware-iso":      new(vmwareisobuilder.Builder),
-	"vmware-vmx":      new(vmwarevmxbuilder.Builder),
-}
-
-var Provisioners = map[string]packer.Provisioner{
-	"ansible":           new(ansibleprovisioner.Provisioner),
-	"ansible-local":     new(ansiblelocalprovisioner.Provisioner),
-	"chef-client":       new(chefclientprovisioner.Provisioner),
-	"chef-solo":         new(chefsoloprovisioner.Provisioner),
-	"file":              new(fileprovisioner.Provisioner),
-	"powershell":        new(powershellprovisioner.Provisioner),
-	"puppet-masterless": new(puppetmasterlessprovisioner.Provisioner),
-	"puppet-server":     new(puppetserverprovisioner.Provisioner),
-	"salt-masterless":   new(saltmasterlessprovisioner.Provisioner),
-	"shell":             new(shellprovisioner.Provisioner),
-	"shell-local":       new(shelllocalprovisioner.Provisioner),
-	"windows-restart":   new(windowsrestartprovisioner.Provisioner),
-	"windows-shell":     new(windowsshellprovisioner.Provisioner),
-}
-
-var PostProcessors = map[string]packer.PostProcessor{
-	"amazon-import": new(amazonimportpostprocessor.PostProcessor),
-	"artifice":      new(artificepostprocessor.PostProcessor),
-	"atlas":         new(atlaspostprocessor.PostProcessor),
-	"checksum":      new(checksumpostprocessor.PostProcessor),
-	"compress":      new(compresspostprocessor.PostProcessor),
-	"docker-import": new(dockerimportpostprocessor.PostProcessor),
-	"docker-push":   new(dockerpushpostprocessor.PostProcessor),
-	"docker-save":   new(dockersavepostprocessor.PostProcessor),
-	"docker-tag":    new(dockertagpostprocessor.PostProcessor),
-	"manifest":      new(manifestpostprocessor.PostProcessor),
-	"shell-local":   new(shelllocalpostprocessor.PostProcessor),
-	"vagrant":       new(vagrantpostprocessor.PostProcessor),
-	"vagrant-cloud": new(vagrantcloudpostprocessor.PostProcessor),
-	"vsphere":       new(vspherepostprocessor.PostProcessor),
-}
-
-var pluginRegexp = regexp.MustCompile("packer-(builder|post-processor|provisioner)-(.+)")
-
-func (c *PluginCommand) Run(args []string) int {
-	// This is an internal call (users should not call this directly) so we're
-	// not going to do much input validation. If there's a problem we'll often
-	// just crash. Error handling should be added to facilitate debugging.
-	log.Printf("args: %#v", args)
-	if len(args) != 1 {
-		c.Ui.Error("Wrong number of args")
-		return 1
-	}
-
-	// Plugin will match something like "packer-builder-amazon-ebs"
-	parts := pluginRegexp.FindStringSubmatch(args[0])
-	if len(parts) != 3 {
-		c.Ui.Error(fmt.Sprintf("Error parsing plugin argument [DEBUG]: %#v", parts))
-		return 1
-	}
-	pluginType := parts[1] // capture group 1 (builder|post-processor|provisioner)
-	pluginName := parts[2] // capture group 2 (.+)
-
-	server, err := plugin.Server()
-	if err != nil {
-		c.Ui.Error(fmt.Sprintf("Error starting plugin server: %s", err))
-		return 1
-	}
-
-	switch pluginType {
-	case "builder":
-		builder, found := Builders[pluginName]
-		if !found {
-			c.Ui.Error(fmt.Sprintf("Could not load builder: %s", pluginName))
-			return 1
-		}
-		server.RegisterBuilder(builder)
-	case "provisioner":
-		provisioner, found := Provisioners[pluginName]
-		if !found {
-			c.Ui.Error(fmt.Sprintf("Could not load provisioner: %s", pluginName))
-			return 1
-		}
-		server.RegisterProvisioner(provisioner)
-	case "post-processor":
-		postProcessor, found := PostProcessors[pluginName]
-		if !found {
-			c.Ui.Error(fmt.Sprintf("Could not load post-processor: %s", pluginName))
-			return 1
-		}
-		server.RegisterPostProcessor(postProcessor)
-	}
-
-	server.Serve()
-
-	return 0
-}
-
-func (*PluginCommand) Help() string {
-	helpText := `
-Usage: packer plugin PLUGIN
-
-  Runs an internally-compiled version of a plugin from the packer binary.
-
-  NOTE: this is an internal command and you should not call it yourself.
-`
-
-	return strings.TrimSpace(helpText)
-}
-
-func (c *PluginCommand) Synopsis() string {
-	return "internal plugin command"
-}
@@ -0,0 +1,37 @@
+package googlecomputeexport
+
+import (
+	"fmt"
+)
+
+const BuilderId = "packer.post-processor.googlecompute-export"
+
+type Artifact struct {
+	paths []string
+}
+
+func (*Artifact) BuilderId() string {
+	return BuilderId
+}
+
+func (*Artifact) Id() string {
+	return ""
+}
+
+func (a *Artifact) Files() []string {
+	pathsCopy := make([]string, len(a.paths))
+	copy(pathsCopy, a.paths)
+	return pathsCopy
+}
+
+func (a *Artifact) String() string {
+	return fmt.Sprintf("Exported artifacts in: %s", a.paths)
+}
+
+func (*Artifact) State(name string) interface{} {
+	return nil
+}
+
+func (a *Artifact) Destroy() error {
+	return nil
+}
@@ -0,0 +1,130 @@
+package googlecomputeexport
+
+import (
+	"fmt"
+	"io/ioutil"
+	"strings"
+
+	"github.com/mitchellh/multistep"
+	"github.com/mitchellh/packer/builder/googlecompute"
+	"github.com/mitchellh/packer/common"
+	"github.com/mitchellh/packer/helper/config"
+	"github.com/mitchellh/packer/packer"
+	"github.com/mitchellh/packer/template/interpolate"
+)
+
+type Config struct {
+	common.PackerConfig `mapstructure:",squash"`
+
+	Paths             []string `mapstructure:"paths"`
+	KeepOriginalImage bool     `mapstructure:"keep_input_artifact"`
+
+	ctx interpolate.Context
+}
+
+type PostProcessor struct {
+	config Config
+	runner multistep.Runner
+}
+
+func (p *PostProcessor) Configure(raws ...interface{}) error {
+	err := config.Decode(&p.config, &config.DecodeOpts{
+		Interpolate:        true,
+		InterpolateContext: &p.config.ctx,
+	}, raws...)
+	if err != nil {
+		return err
+	}
+
+	return nil
+}
+
+func (p *PostProcessor) PostProcess(ui packer.Ui, artifact packer.Artifact) (packer.Artifact, bool, error) {
+	ui.Say("Starting googlecompute-export...")
+	ui.Say(fmt.Sprintf("Exporting image to destinations: %v", p.config.Paths))
+	if artifact.BuilderId() != googlecompute.BuilderId {
+		err := fmt.Errorf(
+			"Unknown artifact type: %s\nCan only export from Google Compute Engine builder artifacts.",
+			artifact.BuilderId())
+		return nil, p.config.KeepOriginalImage, err
+	}
+
+	result := &Artifact{paths: p.config.Paths}
+
+	if len(p.config.Paths) > 0 {
+		accountKeyFilePath := artifact.State("AccountFilePath").(string)
+		imageName := artifact.State("ImageName").(string)
+		imageSizeGb := artifact.State("ImageSizeGb").(int64)
+		projectId := artifact.State("ProjectId").(string)
+		zone := artifact.State("BuildZone").(string)
+
+		// Set up instance configuration.
+		instanceName := fmt.Sprintf("%s-exporter", artifact.Id())
+		metadata := map[string]string{
+			"image_name":     imageName,
+			"name":           instanceName,
+			"paths":          strings.Join(p.config.Paths, " "),
+			"startup-script": StartupScript,
+			"zone":           zone,
+		}
+		exporterConfig := googlecompute.Config{
+			InstanceName:         instanceName,
+			SourceImageProjectId: "debian-cloud",
+			SourceImage:          "debian-8-jessie-v20160629",
+			DiskName:             instanceName,
+			DiskSizeGb:           imageSizeGb + 10,
+			DiskType:             "pd-standard",
+			Metadata:             metadata,
+			MachineType:          "n1-standard-4",
+			Zone:                 zone,
+			Network:              "default",
+			RawStateTimeout:      "5m",
+		}
+		exporterConfig.CalcTimeout()
+
+		// Set up credentials and GCE driver.
+		b, err := ioutil.ReadFile(accountKeyFilePath)
+		if err != nil {
+			err = fmt.Errorf("Error fetching account credentials: %s", err)
+			return nil, p.config.KeepOriginalImage, err
+		}
+		accountKeyContents := string(b)
+		googlecompute.ProcessAccountFile(&exporterConfig.Account, accountKeyContents)
+		driver, err := googlecompute.NewDriverGCE(ui, projectId, &exporterConfig.Account)
+		if err != nil {
+			return nil, p.config.KeepOriginalImage, err
+		}
+
+		// Set up the state.
+		state := new(multistep.BasicStateBag)
+		state.Put("config", &exporterConfig)
+		state.Put("driver", driver)
+		state.Put("ui", ui)
+
+		// Build the steps.
+		steps := []multistep.Step{
+			&googlecompute.StepCreateSSHKey{
+				Debug:        p.config.PackerDebug,
+				DebugKeyPath: fmt.Sprintf("gce_%s.pem", p.config.PackerBuildName),
+			},
+			&googlecompute.StepCreateInstance{
+				Debug: p.config.PackerDebug,
+			},
+			new(googlecompute.StepWaitInstanceStartup),
+			new(googlecompute.StepTeardownInstance),
+		}
+
+		// Run the steps.
+		if p.config.PackerDebug {
+			p.runner = &multistep.DebugRunner{
+				Steps:   steps,
+				PauseFn: common.MultistepDebugFn(ui),
+			}
+		} else {
+			p.runner = &multistep.BasicRunner{Steps: steps}
+		}
+		p.runner.Run(state)
+	}
+
+	return result, p.config.KeepOriginalImage, nil
+}
@@ -0,0 +1 @@
+package googlecomputeexport
@@ -0,0 +1,44 @@
+package googlecomputeexport
+
+var StartupScript string = `#!/bin/sh
+
+GetMetadata () {
+	echo "$(curl -f -H "Metadata-Flavor: Google" http://metadata/computeMetadata/v1/instance/attributes/$1 2> /dev/null)"
+}
+IMAGENAME=$(GetMetadata image_name)
+NAME=$(GetMetadata name)
+DISKNAME=${NAME}-toexport
+PATHS=$(GetMetadata paths)
+ZONE=$(GetMetadata zone)
+
+echo "####### Export configuration #######"
+echo "Image name - ${IMAGENAME}"
+echo "Instance name - ${NAME}"
+echo "Instance zone - ${ZONE}"
+echo "Disk name - ${DISKNAME}"
+echo "Export paths - ${PATHS}"
+echo "####################################"
+
+echo "Creating disk from image to be exported..."
+gcloud compute disks create ${DISKNAME} --image ${IMAGENAME} --zone ${ZONE}
+echo "Attaching disk..."
+gcloud compute instances attach-disk ${NAME} --disk ${DISKNAME} --device-name toexport --zone ${ZONE}
+
+echo "Dumping disk..."
+dd if=/dev/disk/by-id/google-toexport of=disk.raw bs=4096 conv=sparse
+echo "Compressing and tar'ing disk image..."
+tar -czf root.tar.gz disk.raw
+
+echo "Detaching disk..."
+gcloud compute instances detach-disk ${NAME} --disk ${DISKNAME} --zone ${ZONE}
+echo "Deleting disk..."
+gcloud compute disks delete ${DISKNAME} --zone ${ZONE}
+
+for i in ${PATHS}; do
+	echo "Uploading tar'ed disk image to ${i}..."
+	gsutil -o GSUtil:parallel_composite_upload_threshold=100M cp root.tar.gz ${i}
+	LOGDEST="${i}.exporter.log"
+	echo "Uploading exporter log to ${LOGDEST}..."
+	gsutil -h "Content-Type:text/plain" cp /var/log/daemon.log ${LOGDEST}
+done
+`
@@ -82,7 +82,7 @@ repackage an existing GCE image. The account_file is obtained in the previous
 section. If it parses as JSON it is assumed to be the file itself, otherwise it
 is assumed to be the path to the file containing the JSON.
 
-``` {.javascript}
+``` {.json}
 {
   "builders": [{
     "type": "googlecompute",
@@ -0,0 +1,73 @@
+---
+description: |
+    The Google Compute Image Exporter post-processor exports an image from a Packer
+    googlecompute builder run and uploads it to Google Cloud Storage. The exported
+    images can be easily shared and uploaded to other Google Cloud Projects.
+layout: docs
+page_title: 'Google Compute Image Exporter'
+...
+
+# Google Compute Image Exporter Post-Processor
+
+Type: `googlecompute-export`
+
+The Google Compute Image Exporter post-processor exports the resultant image from a
+googlecompute build as a gzipped tarball to Google Cloud Storage (GCS).
+
+The exporter uses the same Google Cloud Platform (GCP) project and authentication
+credentials as the googlecompute build that produced the image. A temporary VM is
+started in the GCP project using these credentials. The VM mounts the built image as
+a disk, then dumps, compresses, and tars the image. The VM then uploads the tarball
+to the provided GCS `paths` using the same credentials.
+
+As such, the authentication credentials that built the image must have write
+permissions to the GCS `paths`.
+
+## Configuration
+
+### Required
+
+-   `paths` (list of string) - The list of GCS paths, e.g.
+    'gs://mybucket/path/to/file.tar.gz', where the image will be exported.
+
+### Optional
+
+-   `keep_input_artifact` (bool) - If true, do not delete the Google Compute Engine
+    (GCE) image being exported.
+
+## Basic Example
+
+The following example builds a GCE image in the project, `my-project`, with an
+account whose keyfile is `account.json`. After the image build, a temporary VM will
+be created to export the image as a gzipped tarball to
+`gs://mybucket1/path/to/file1.tar.gz` and `gs://mybucket2/path/to/file2.tar.gz`.
+`keep_input_artifact` is true, so the GCE image won't be deleted after the export.
+
+In order for this example to work, the account associated with `account.json` must
+have write access to both `gs://mybucket1/path/to/file1.tar.gz` and
+`gs://mybucket2/path/to/file2.tar.gz`.
+
+``` {.json}
+{
+    "builders": [
+        {
+            "type": "googlecompute",
+            "account_file": "account.json",
+            "project_id": "my-project",
+            "source_image": "debian-7-wheezy-v20150127",
+            "zone": "us-central1-a"
+        }
+    ],
+    "post-processors": [
+        {
+            "type": "googlecompute-export",
+            "paths": [
+                "gs://mybucket1/path/to/file1.tar.gz",
+                "gs://mybucket2/path/to/file2.tar.gz"
+            ],
+            "keep_input_artifact": true
+        }
+    ]
+}
+```