From 55b5a45ae48633636c873ee58c1ad4c7e77b8e0a Mon Sep 17 00:00:00 2001 From: crunk1 Date: Thu, 7 Jul 2016 17:50:46 -0400 Subject: [PATCH 1/3] Created googlecompute-export post-processor. Modified the googlecompute builder to pass a few configuration values to its resulting artifact. These values can then be used in googlecompute-export post-processor. Added documentation and changed authentication stuff on temporary VM. --- builder/googlecompute/account.go | 4 +- builder/googlecompute/artifact.go | 17 +- builder/googlecompute/builder.go | 3 +- builder/googlecompute/config.go | 19 +- builder/googlecompute/driver_gce.go | 2 +- builder/googlecompute/step_create_instance.go | 2 +- .../googlecompute/step_teardown_instance.go | 5 +- command/plugin.go | 189 ------------------ .../googlecompute-export/artifact.go | 37 ++++ .../googlecompute-export/post-processor.go | 130 ++++++++++++ .../post-processor_test.go | 1 + .../googlecompute-export/startup.go | 44 ++++ .../docs/builders/googlecompute.html.md | 2 +- .../googlecompute-export.html.md | 73 +++++++ 14 files changed, 323 insertions(+), 205 deletions(-) delete mode 100644 command/plugin.go create mode 100644 post-processor/googlecompute-export/artifact.go create mode 100644 post-processor/googlecompute-export/post-processor.go create mode 100644 post-processor/googlecompute-export/post-processor_test.go create mode 100644 post-processor/googlecompute-export/startup.go create mode 100644 website/source/docs/post-processors/googlecompute-export.html.md diff --git a/builder/googlecompute/account.go b/builder/googlecompute/account.go index ea94b11f1..75734e279 100644 --- a/builder/googlecompute/account.go +++ b/builder/googlecompute/account.go @@ -9,7 +9,7 @@ import ( ) // accountFile represents the structure of the account file JSON file. 
-type accountFile struct { +type AccountFile struct { PrivateKeyId string `json:"private_key_id"` PrivateKey string `json:"private_key"` ClientEmail string `json:"client_email"` @@ -22,7 +22,7 @@ func parseJSON(result interface{}, text string) error { return dec.Decode(result) } -func processAccountFile(account_file *accountFile, text string) error { +func ProcessAccountFile(account_file *AccountFile, text string) error { // Assume text is a JSON string if err := parseJSON(account_file, text); err != nil { // If text was not JSON, assume it is a file path instead diff --git a/builder/googlecompute/artifact.go b/builder/googlecompute/artifact.go index 128db103b..86d8aa9e1 100644 --- a/builder/googlecompute/artifact.go +++ b/builder/googlecompute/artifact.go @@ -7,8 +7,9 @@ import ( // Artifact represents a GCE image as the result of a Packer build. type Artifact struct { - image Image - driver Driver + image Image + driver Driver + config *Config } // BuilderId returns the builder Id. @@ -39,5 +40,17 @@ func (a *Artifact) String() string { } func (a *Artifact) State(name string) interface{} { + switch name { + case "ImageName": + return a.image.Name + case "ImageSizeGb": + return a.image.SizeGb + case "AccountFilePath": + return a.config.AccountFile + case "ProjectId": + return a.config.ProjectId + case "BuildZone": + return a.config.Zone + } return nil } diff --git a/builder/googlecompute/builder.go b/builder/googlecompute/builder.go index 987672a90..d4930e033 100644 --- a/builder/googlecompute/builder.go +++ b/builder/googlecompute/builder.go @@ -36,7 +36,7 @@ func (b *Builder) Prepare(raws ...interface{}) ([]string, error) { // representing a GCE machine image. 
func (b *Builder) Run(ui packer.Ui, hook packer.Hook, cache packer.Cache) (packer.Artifact, error) { driver, err := NewDriverGCE( - ui, b.config.ProjectId, &b.config.account) + ui, b.config.ProjectId, &b.config.Account) if err != nil { return nil, err } @@ -95,6 +95,7 @@ func (b *Builder) Run(ui packer.Ui, hook packer.Hook, cache packer.Cache) (packe artifact := &Artifact{ image: state.Get("image").(Image), driver: driver, + config: b.config, } return artifact, nil } diff --git a/builder/googlecompute/config.go b/builder/googlecompute/config.go index bd08f1b2e..7c5eb6956 100644 --- a/builder/googlecompute/config.go +++ b/builder/googlecompute/config.go @@ -49,7 +49,7 @@ type Config struct { UseInternalIP bool `mapstructure:"use_internal_ip"` Zone string `mapstructure:"zone"` - account accountFile + Account AccountFile privateKeyBytes []byte stateTimeout time.Duration ctx interpolate.Context @@ -157,15 +157,13 @@ func NewConfig(raws ...interface{}) (*Config, []string, error) { c.Region = region } - stateTimeout, err := time.ParseDuration(c.RawStateTimeout) + err = c.CalcTimeout() if err != nil { - errs = packer.MultiErrorAppend( - errs, fmt.Errorf("Failed parsing state_timeout: %s", err)) + errs = packer.MultiErrorAppend(errs, err) } - c.stateTimeout = stateTimeout if c.AccountFile != "" { - if err := processAccountFile(&c.account, c.AccountFile); err != nil { + if err := ProcessAccountFile(&c.Account, c.AccountFile); err != nil { errs = packer.MultiErrorAppend(errs, err) } } @@ -185,3 +183,12 @@ func NewConfig(raws ...interface{}) (*Config, []string, error) { return c, nil, nil } + +func (c *Config) CalcTimeout() error { + stateTimeout, err := time.ParseDuration(c.RawStateTimeout) + if err != nil { + return fmt.Errorf("Failed parsing state_timeout: %s", err) + } + c.stateTimeout = stateTimeout + return nil +} \ No newline at end of file diff --git a/builder/googlecompute/driver_gce.go b/builder/googlecompute/driver_gce.go index 67b4c7fe8..d8df51e12 100644 --- 
a/builder/googlecompute/driver_gce.go +++ b/builder/googlecompute/driver_gce.go @@ -26,7 +26,7 @@ type driverGCE struct { var DriverScopes = []string{"https://www.googleapis.com/auth/compute", "https://www.googleapis.com/auth/devstorage.full_control"} -func NewDriverGCE(ui packer.Ui, p string, a *accountFile) (Driver, error) { +func NewDriverGCE(ui packer.Ui, p string, a *AccountFile) (Driver, error) { var err error var client *http.Client diff --git a/builder/googlecompute/step_create_instance.go b/builder/googlecompute/step_create_instance.go index 114cc7b9d..bf4a013e8 100644 --- a/builder/googlecompute/step_create_instance.go +++ b/builder/googlecompute/step_create_instance.go @@ -80,7 +80,7 @@ func (s *StepCreateInstance) Run(state multistep.StateBag) multistep.StepAction OmitExternalIP: config.OmitExternalIP, Preemptible: config.Preemptible, Region: config.Region, - ServiceAccountEmail: config.account.ClientEmail, + ServiceAccountEmail: config.Account.ClientEmail, Subnetwork: config.Subnetwork, Tags: config.Tags, Zone: config.Zone, diff --git a/builder/googlecompute/step_teardown_instance.go b/builder/googlecompute/step_teardown_instance.go index 42ad83a5d..1af2be8a4 100644 --- a/builder/googlecompute/step_teardown_instance.go +++ b/builder/googlecompute/step_teardown_instance.go @@ -71,8 +71,9 @@ func (s *StepTeardownInstance) Cleanup(state multistep.StateBag) { if err != nil { ui.Error(fmt.Sprintf( "Error deleting disk. Please delete it manually.\n\n"+ - "Name: %s\n"+ - "Error: %s", config.InstanceName, err)) + "DiskName: %s\n" + + "Zone: %s\n" + + "Error: %s", config.DiskName, config.Zone, err)) } ui.Message("Disk has been deleted!") diff --git a/command/plugin.go b/command/plugin.go deleted file mode 100644 index 729edeba9..000000000 --- a/command/plugin.go +++ /dev/null @@ -1,189 +0,0 @@ -// -// This file is automatically generated by scripts/generate-plugins.go -- Do not edit! 
-// - -package command - -import ( - "fmt" - "log" - "regexp" - "strings" - - "github.com/mitchellh/packer/packer" - "github.com/mitchellh/packer/packer/plugin" - - amazonchrootbuilder "github.com/mitchellh/packer/builder/amazon/chroot" - amazonebsbuilder "github.com/mitchellh/packer/builder/amazon/ebs" - amazoninstancebuilder "github.com/mitchellh/packer/builder/amazon/instance" - azurearmbuilder "github.com/mitchellh/packer/builder/azure/arm" - digitaloceanbuilder "github.com/mitchellh/packer/builder/digitalocean" - dockerbuilder "github.com/mitchellh/packer/builder/docker" - filebuilder "github.com/mitchellh/packer/builder/file" - googlecomputebuilder "github.com/mitchellh/packer/builder/googlecompute" - nullbuilder "github.com/mitchellh/packer/builder/null" - openstackbuilder "github.com/mitchellh/packer/builder/openstack" - parallelsisobuilder "github.com/mitchellh/packer/builder/parallels/iso" - parallelspvmbuilder "github.com/mitchellh/packer/builder/parallels/pvm" - qemubuilder "github.com/mitchellh/packer/builder/qemu" - virtualboxisobuilder "github.com/mitchellh/packer/builder/virtualbox/iso" - virtualboxovfbuilder "github.com/mitchellh/packer/builder/virtualbox/ovf" - vmwareisobuilder "github.com/mitchellh/packer/builder/vmware/iso" - vmwarevmxbuilder "github.com/mitchellh/packer/builder/vmware/vmx" - amazonimportpostprocessor "github.com/mitchellh/packer/post-processor/amazon-import" - artificepostprocessor "github.com/mitchellh/packer/post-processor/artifice" - atlaspostprocessor "github.com/mitchellh/packer/post-processor/atlas" - checksumpostprocessor "github.com/mitchellh/packer/post-processor/checksum" - compresspostprocessor "github.com/mitchellh/packer/post-processor/compress" - dockerimportpostprocessor "github.com/mitchellh/packer/post-processor/docker-import" - dockerpushpostprocessor "github.com/mitchellh/packer/post-processor/docker-push" - dockersavepostprocessor "github.com/mitchellh/packer/post-processor/docker-save" - 
dockertagpostprocessor "github.com/mitchellh/packer/post-processor/docker-tag" - manifestpostprocessor "github.com/mitchellh/packer/post-processor/manifest" - shelllocalpostprocessor "github.com/mitchellh/packer/post-processor/shell-local" - vagrantpostprocessor "github.com/mitchellh/packer/post-processor/vagrant" - vagrantcloudpostprocessor "github.com/mitchellh/packer/post-processor/vagrant-cloud" - vspherepostprocessor "github.com/mitchellh/packer/post-processor/vsphere" - ansibleprovisioner "github.com/mitchellh/packer/provisioner/ansible" - ansiblelocalprovisioner "github.com/mitchellh/packer/provisioner/ansible-local" - chefclientprovisioner "github.com/mitchellh/packer/provisioner/chef-client" - chefsoloprovisioner "github.com/mitchellh/packer/provisioner/chef-solo" - fileprovisioner "github.com/mitchellh/packer/provisioner/file" - powershellprovisioner "github.com/mitchellh/packer/provisioner/powershell" - puppetmasterlessprovisioner "github.com/mitchellh/packer/provisioner/puppet-masterless" - puppetserverprovisioner "github.com/mitchellh/packer/provisioner/puppet-server" - saltmasterlessprovisioner "github.com/mitchellh/packer/provisioner/salt-masterless" - shellprovisioner "github.com/mitchellh/packer/provisioner/shell" - shelllocalprovisioner "github.com/mitchellh/packer/provisioner/shell-local" - windowsrestartprovisioner "github.com/mitchellh/packer/provisioner/windows-restart" - windowsshellprovisioner "github.com/mitchellh/packer/provisioner/windows-shell" -) - -type PluginCommand struct { - Meta -} - -var Builders = map[string]packer.Builder{ - "amazon-chroot": new(amazonchrootbuilder.Builder), - "amazon-ebs": new(amazonebsbuilder.Builder), - "amazon-instance": new(amazoninstancebuilder.Builder), - "azure-arm": new(azurearmbuilder.Builder), - "digitalocean": new(digitaloceanbuilder.Builder), - "docker": new(dockerbuilder.Builder), - "file": new(filebuilder.Builder), - "googlecompute": new(googlecomputebuilder.Builder), - "null": 
new(nullbuilder.Builder), - "openstack": new(openstackbuilder.Builder), - "parallels-iso": new(parallelsisobuilder.Builder), - "parallels-pvm": new(parallelspvmbuilder.Builder), - "qemu": new(qemubuilder.Builder), - "virtualbox-iso": new(virtualboxisobuilder.Builder), - "virtualbox-ovf": new(virtualboxovfbuilder.Builder), - "vmware-iso": new(vmwareisobuilder.Builder), - "vmware-vmx": new(vmwarevmxbuilder.Builder), -} - -var Provisioners = map[string]packer.Provisioner{ - "ansible": new(ansibleprovisioner.Provisioner), - "ansible-local": new(ansiblelocalprovisioner.Provisioner), - "chef-client": new(chefclientprovisioner.Provisioner), - "chef-solo": new(chefsoloprovisioner.Provisioner), - "file": new(fileprovisioner.Provisioner), - "powershell": new(powershellprovisioner.Provisioner), - "puppet-masterless": new(puppetmasterlessprovisioner.Provisioner), - "puppet-server": new(puppetserverprovisioner.Provisioner), - "salt-masterless": new(saltmasterlessprovisioner.Provisioner), - "shell": new(shellprovisioner.Provisioner), - "shell-local": new(shelllocalprovisioner.Provisioner), - "windows-restart": new(windowsrestartprovisioner.Provisioner), - "windows-shell": new(windowsshellprovisioner.Provisioner), -} - -var PostProcessors = map[string]packer.PostProcessor{ - "amazon-import": new(amazonimportpostprocessor.PostProcessor), - "artifice": new(artificepostprocessor.PostProcessor), - "atlas": new(atlaspostprocessor.PostProcessor), - "checksum": new(checksumpostprocessor.PostProcessor), - "compress": new(compresspostprocessor.PostProcessor), - "docker-import": new(dockerimportpostprocessor.PostProcessor), - "docker-push": new(dockerpushpostprocessor.PostProcessor), - "docker-save": new(dockersavepostprocessor.PostProcessor), - "docker-tag": new(dockertagpostprocessor.PostProcessor), - "manifest": new(manifestpostprocessor.PostProcessor), - "shell-local": new(shelllocalpostprocessor.PostProcessor), - "vagrant": new(vagrantpostprocessor.PostProcessor), - "vagrant-cloud": 
new(vagrantcloudpostprocessor.PostProcessor), - "vsphere": new(vspherepostprocessor.PostProcessor), -} - -var pluginRegexp = regexp.MustCompile("packer-(builder|post-processor|provisioner)-(.+)") - -func (c *PluginCommand) Run(args []string) int { - // This is an internal call (users should not call this directly) so we're - // not going to do much input validation. If there's a problem we'll often - // just crash. Error handling should be added to facilitate debugging. - log.Printf("args: %#v", args) - if len(args) != 1 { - c.Ui.Error("Wrong number of args") - return 1 - } - - // Plugin will match something like "packer-builder-amazon-ebs" - parts := pluginRegexp.FindStringSubmatch(args[0]) - if len(parts) != 3 { - c.Ui.Error(fmt.Sprintf("Error parsing plugin argument [DEBUG]: %#v", parts)) - return 1 - } - pluginType := parts[1] // capture group 1 (builder|post-processor|provisioner) - pluginName := parts[2] // capture group 2 (.+) - - server, err := plugin.Server() - if err != nil { - c.Ui.Error(fmt.Sprintf("Error starting plugin server: %s", err)) - return 1 - } - - switch pluginType { - case "builder": - builder, found := Builders[pluginName] - if !found { - c.Ui.Error(fmt.Sprintf("Could not load builder: %s", pluginName)) - return 1 - } - server.RegisterBuilder(builder) - case "provisioner": - provisioner, found := Provisioners[pluginName] - if !found { - c.Ui.Error(fmt.Sprintf("Could not load provisioner: %s", pluginName)) - return 1 - } - server.RegisterProvisioner(provisioner) - case "post-processor": - postProcessor, found := PostProcessors[pluginName] - if !found { - c.Ui.Error(fmt.Sprintf("Could not load post-processor: %s", pluginName)) - return 1 - } - server.RegisterPostProcessor(postProcessor) - } - - server.Serve() - - return 0 -} - -func (*PluginCommand) Help() string { - helpText := ` -Usage: packer plugin PLUGIN - - Runs an internally-compiled version of a plugin from the packer binary. 
- - NOTE: this is an internal command and you should not call it yourself. -` - - return strings.TrimSpace(helpText) -} - -func (c *PluginCommand) Synopsis() string { - return "internal plugin command" -} diff --git a/post-processor/googlecompute-export/artifact.go b/post-processor/googlecompute-export/artifact.go new file mode 100644 index 000000000..12ff040a6 --- /dev/null +++ b/post-processor/googlecompute-export/artifact.go @@ -0,0 +1,37 @@ +package googlecomputeexport + +import ( + "fmt" +) + +const BuilderId = "packer.post-processor.googlecompute-export" + +type Artifact struct { + paths []string +} + +func (*Artifact) BuilderId() string { + return BuilderId +} + +func (*Artifact) Id() string { + return "" +} + +func (a *Artifact) Files() []string { + pathsCopy := make([]string, len(a.paths)) + copy(pathsCopy, a.paths) + return pathsCopy +} + +func (a *Artifact) String() string { + return fmt.Sprintf("Exported artifacts in: %s", a.paths) +} + +func (*Artifact) State(name string) interface{} { + return nil +} + +func (a *Artifact) Destroy() error { + return nil +} diff --git a/post-processor/googlecompute-export/post-processor.go b/post-processor/googlecompute-export/post-processor.go new file mode 100644 index 000000000..e835f30a9 --- /dev/null +++ b/post-processor/googlecompute-export/post-processor.go @@ -0,0 +1,130 @@ +package googlecomputeexport + +import ( + "fmt" + "io/ioutil" + "strings" + + "github.com/mitchellh/multistep" + "github.com/mitchellh/packer/builder/googlecompute" + "github.com/mitchellh/packer/common" + "github.com/mitchellh/packer/helper/config" + "github.com/mitchellh/packer/packer" + "github.com/mitchellh/packer/template/interpolate" +) + +type Config struct { + common.PackerConfig `mapstructure:",squash"` + + Paths []string `mapstructure:"paths"` + KeepOriginalImage bool `mapstructure:"keep_input_artifact"` + + ctx interpolate.Context +} + +type PostProcessor struct { + config Config + runner multistep.Runner +} + +func (p 
*PostProcessor) Configure(raws ...interface{}) error { + err := config.Decode(&p.config, &config.DecodeOpts{ + Interpolate: true, + InterpolateContext: &p.config.ctx, + }, raws...) + if err != nil { + return err + } + + return nil +} + +func (p *PostProcessor) PostProcess(ui packer.Ui, artifact packer.Artifact) (packer.Artifact, bool, error) { + ui.Say("Starting googlecompute-export...") + ui.Say(fmt.Sprintf("Exporting image to destinations: %v", p.config.Paths)) + if artifact.BuilderId() != googlecompute.BuilderId { + err := fmt.Errorf( + "Unknown artifact type: %s\nCan only export from Google Compute Engine builder artifacts.", + artifact.BuilderId()) + return nil, p.config.KeepOriginalImage, err + } + + result := &Artifact{paths: p.config.Paths} + + if len(p.config.Paths) > 0 { + accountKeyFilePath := artifact.State("AccountFilePath").(string) + imageName := artifact.State("ImageName").(string) + imageSizeGb := artifact.State("ImageSizeGb").(int64) + projectId := artifact.State("ProjectId").(string) + zone := artifact.State("BuildZone").(string) + + // Set up instance configuration. + instanceName := fmt.Sprintf("%s-exporter", artifact.Id()) + metadata := map[string]string{ + "image_name": imageName, + "name": instanceName, + "paths": strings.Join(p.config.Paths, " "), + "startup-script": StartupScript, + "zone": zone, + } + exporterConfig := googlecompute.Config{ + InstanceName: instanceName, + SourceImageProjectId: "debian-cloud", + SourceImage: "debian-8-jessie-v20160629", + DiskName: instanceName, + DiskSizeGb: imageSizeGb + 10, + DiskType: "pd-standard", + Metadata: metadata, + MachineType: "n1-standard-4", + Zone: zone, + Network: "default", + RawStateTimeout: "5m", + } + exporterConfig.CalcTimeout() + + // Set up credentials and GCE driver. 
+ b, err := ioutil.ReadFile(accountKeyFilePath) + if err != nil { + err = fmt.Errorf("Error fetching account credentials: %s", err) + return nil, p.config.KeepOriginalImage, err + } + accountKeyContents := string(b) + googlecompute.ProcessAccountFile(&exporterConfig.Account, accountKeyContents) + driver, err := googlecompute.NewDriverGCE(ui, projectId, &exporterConfig.Account) + if err != nil { + return nil, p.config.KeepOriginalImage, err + } + + // Set up the state. + state := new(multistep.BasicStateBag) + state.Put("config", &exporterConfig) + state.Put("driver", driver) + state.Put("ui", ui) + + // Build the steps. + steps := []multistep.Step{ + &googlecompute.StepCreateSSHKey{ + Debug: p.config.PackerDebug, + DebugKeyPath: fmt.Sprintf("gce_%s.pem", p.config.PackerBuildName), + }, + &googlecompute.StepCreateInstance{ + Debug: p.config.PackerDebug, + }, + new(googlecompute.StepWaitInstanceStartup), + new(googlecompute.StepTeardownInstance), + } + + // Run the steps. + if p.config.PackerDebug { + p.runner = &multistep.DebugRunner{ + Steps: steps, + PauseFn: common.MultistepDebugFn(ui), + } + } else { + p.runner = &multistep.BasicRunner{Steps: steps} + } + p.runner.Run(state) + } + + return result, p.config.KeepOriginalImage, nil +} diff --git a/post-processor/googlecompute-export/post-processor_test.go b/post-processor/googlecompute-export/post-processor_test.go new file mode 100644 index 000000000..12f53e42a --- /dev/null +++ b/post-processor/googlecompute-export/post-processor_test.go @@ -0,0 +1 @@ +package googlecomputeexport diff --git a/post-processor/googlecompute-export/startup.go b/post-processor/googlecompute-export/startup.go new file mode 100644 index 000000000..0a4ab5204 --- /dev/null +++ b/post-processor/googlecompute-export/startup.go @@ -0,0 +1,44 @@ +package googlecomputeexport + +var StartupScript string = `#!/bin/sh + +GetMetadata () { + echo "$(curl -f -H "Metadata-Flavor: Google" http://metadata/computeMetadata/v1/instance/attributes/$1 2> 
/dev/null)" +} +IMAGENAME=$(GetMetadata image_name) +NAME=$(GetMetadata name) +DISKNAME=${NAME}-toexport +PATHS=$(GetMetadata paths) +ZONE=$(GetMetadata zone) + +echo "####### Export configuration #######" +echo "Image name - ${IMAGENAME}" +echo "Instance name - ${NAME}" +echo "Instance zone - ${ZONE}" +echo "Disk name - ${DISKNAME}" +echo "Export paths - ${PATHS}" +echo "####################################" + +echo "Creating disk from image to be exported..." +gcloud compute disks create ${DISKNAME} --image ${IMAGENAME} --zone ${ZONE} +echo "Attaching disk..." +gcloud compute instances attach-disk ${NAME} --disk ${DISKNAME} --device-name toexport --zone ${ZONE} + +echo "Dumping disk..." +dd if=/dev/disk/by-id/google-toexport of=disk.raw bs=4096 conv=sparse +echo "Compressing and tar'ing disk image..." +tar -czf root.tar.gz disk.raw + +echo "Detaching disk..." +gcloud compute instances detach-disk ${NAME} --disk ${DISKNAME} --zone ${ZONE} +echo "Deleting disk..." +gcloud compute disks delete ${DISKNAME} --zone ${ZONE} + +for i in ${PATHS}; do + echo "Uploading tar'ed disk image to ${i}..." + gsutil -o GSUtil:parallel_composite_upload_threshold=100M cp root.tar.gz ${i} + LOGDEST="${i}.exporter.log" + echo "Uploading exporter log to ${LOGDEST}..." + gsutil -h "Content-Type:text/plain" cp /var/log/daemon.log ${LOGDEST} +done +` diff --git a/website/source/docs/builders/googlecompute.html.md b/website/source/docs/builders/googlecompute.html.md index 9a651187e..d9bb6fdc3 100644 --- a/website/source/docs/builders/googlecompute.html.md +++ b/website/source/docs/builders/googlecompute.html.md @@ -82,7 +82,7 @@ repackage an existing GCE image. The account_file is obtained in the previous section. If it parses as JSON it is assumed to be the file itself, otherwise it is assumed to be the path to the file containing the JSON. 
-``` {.javascript} +``` {.json} { "builders": [{ "type": "googlecompute", diff --git a/website/source/docs/post-processors/googlecompute-export.html.md b/website/source/docs/post-processors/googlecompute-export.html.md new file mode 100644 index 000000000..45598e0f5 --- /dev/null +++ b/website/source/docs/post-processors/googlecompute-export.html.md @@ -0,0 +1,73 @@ +--- +description: | + The Google Compute Image Exporter post-processor exports an image from a Packer + googlecompute builder run and uploads it to Google Cloud Storage. The exported + images can be easily shared and uploaded to other Google Cloud Projects. +layout: docs +page_title: 'Google Compute Image Exporter' +... + +# Google Compute Image Exporter Post-Processor + +Type: `googlecompute-export` + +The Google Compute Image Exporter post-processor exports the resultant image from a +googlecompute build as a gzipped tarball to Google Cloud Storage (GCS). + +The exporter uses the same Google Cloud Platform (GCP) project and authentication +credentials as the googlecompute build that produced the image. A temporary VM is +started in the GCP project using these credentials. The VM mounts the built image as +a disk then dumps, compresses, and tars the image. The VM then uploads the tarball +to the provided GCS `paths` using the same credentials. + +As such, the authentication credentials that built the image must have write +permissions to the GCS `paths`. + + +## Configuration + +### Required + +- `paths` (list of string) - The list of GCS paths, e.g. + 'gs://mybucket/path/to/file.tar.gz', where the image will be exported. + +### Optional + +- `keep_input_artifact` (bool) - If true, do not delete the Google Compute Engine + (GCE) image being exported. + +## Basic Example + +The following example builds a GCE image in the project, `my-project`, with an +account whose keyfile is `account.json`. 
After the image build, a temporary VM will +be created to export the image as a gzipped tarball to +`gs://mybucket1/path/to/file1.tar.gz` and `gs://mybucket2/path/to/file2.tar.gz`. +`keep_input_artifact` is true, so the GCE image won't be deleted after the export. + +In order for this example to work, the account associated with `account.json` must +have write access to both `gs://mybucket1/path/to/file1.tar.gz` and +`gs://mybucket2/path/to/file2.tar.gz`. + +``` {.json} +{ + "builders": [ + { + "type": "googlecompute", + "account_file": "account.json", + "project_id": "my-project", + "source_image": "debian-7-wheezy-v20150127", + "zone": "us-central1-a" + } + ], + "post-processors": [ + { + "type": "googlecompute-export", + "paths": [ + "gs://mybucket1/path/to/file1.tar.gz", + "gs://mybucket2/path/to/file2.tar.gz" + ], + "keep_input_artifact": true + } + ] +} +``` From d2e65e6433b45509fd0f30e69d31de1a0d291ca7 Mon Sep 17 00:00:00 2001 From: Scott Crunkleton Date: Wed, 27 Jul 2016 12:25:19 -0700 Subject: [PATCH 2/3] Generated command/plugin.go using scripts/generate-plugins.go. --- command/plugin.go | 195 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 195 insertions(+) create mode 100644 command/plugin.go diff --git a/command/plugin.go b/command/plugin.go new file mode 100644 index 000000000..4def4582b --- /dev/null +++ b/command/plugin.go @@ -0,0 +1,195 @@ +// +// This file is automatically generated by scripts/generate-plugins.go -- Do not edit! 
+// + +package command + +import ( + "fmt" + "log" + "regexp" + "strings" + + "github.com/mitchellh/packer/packer" + "github.com/mitchellh/packer/packer/plugin" + + amazonchrootbuilder "github.com/mitchellh/packer/builder/amazon/chroot" + amazonebsbuilder "github.com/mitchellh/packer/builder/amazon/ebs" + amazonimportpostprocessor "github.com/mitchellh/packer/post-processor/amazon-import" + amazoninstancebuilder "github.com/mitchellh/packer/builder/amazon/instance" + ansiblelocalprovisioner "github.com/mitchellh/packer/provisioner/ansible-local" + ansibleprovisioner "github.com/mitchellh/packer/provisioner/ansible" + artificepostprocessor "github.com/mitchellh/packer/post-processor/artifice" + atlaspostprocessor "github.com/mitchellh/packer/post-processor/atlas" + azurearmbuilder "github.com/mitchellh/packer/builder/azure/arm" + checksumpostprocessor "github.com/mitchellh/packer/post-processor/checksum" + chefclientprovisioner "github.com/mitchellh/packer/provisioner/chef-client" + chefsoloprovisioner "github.com/mitchellh/packer/provisioner/chef-solo" + compresspostprocessor "github.com/mitchellh/packer/post-processor/compress" + digitaloceanbuilder "github.com/mitchellh/packer/builder/digitalocean" + dockerbuilder "github.com/mitchellh/packer/builder/docker" + dockerimportpostprocessor "github.com/mitchellh/packer/post-processor/docker-import" + dockerpushpostprocessor "github.com/mitchellh/packer/post-processor/docker-push" + dockersavepostprocessor "github.com/mitchellh/packer/post-processor/docker-save" + dockertagpostprocessor "github.com/mitchellh/packer/post-processor/docker-tag" + filebuilder "github.com/mitchellh/packer/builder/file" + fileprovisioner "github.com/mitchellh/packer/provisioner/file" + googlecomputebuilder "github.com/mitchellh/packer/builder/googlecompute" + googlecomputeexportpostprocessor "github.com/mitchellh/packer/post-processor/googlecompute-export" + manifestpostprocessor "github.com/mitchellh/packer/post-processor/manifest" + 
nullbuilder "github.com/mitchellh/packer/builder/null" + openstackbuilder "github.com/mitchellh/packer/builder/openstack" + parallelsisobuilder "github.com/mitchellh/packer/builder/parallels/iso" + parallelspvmbuilder "github.com/mitchellh/packer/builder/parallels/pvm" + powershellprovisioner "github.com/mitchellh/packer/provisioner/powershell" + puppetmasterlessprovisioner "github.com/mitchellh/packer/provisioner/puppet-masterless" + puppetserverprovisioner "github.com/mitchellh/packer/provisioner/puppet-server" + qemubuilder "github.com/mitchellh/packer/builder/qemu" + saltmasterlessprovisioner "github.com/mitchellh/packer/provisioner/salt-masterless" + shelllocalpostprocessor "github.com/mitchellh/packer/post-processor/shell-local" + shelllocalprovisioner "github.com/mitchellh/packer/provisioner/shell-local" + shellprovisioner "github.com/mitchellh/packer/provisioner/shell" + vagrantcloudpostprocessor "github.com/mitchellh/packer/post-processor/vagrant-cloud" + vagrantpostprocessor "github.com/mitchellh/packer/post-processor/vagrant" + virtualboxisobuilder "github.com/mitchellh/packer/builder/virtualbox/iso" + virtualboxovfbuilder "github.com/mitchellh/packer/builder/virtualbox/ovf" + vmwareisobuilder "github.com/mitchellh/packer/builder/vmware/iso" + vmwarevmxbuilder "github.com/mitchellh/packer/builder/vmware/vmx" + vspherepostprocessor "github.com/mitchellh/packer/post-processor/vsphere" + windowsrestartprovisioner "github.com/mitchellh/packer/provisioner/windows-restart" + windowsshellprovisioner "github.com/mitchellh/packer/provisioner/windows-shell" + +) + +type PluginCommand struct { + Meta +} + +var Builders = map[string]packer.Builder{ + "amazon-chroot": new(amazonchrootbuilder.Builder), + "amazon-ebs": new(amazonebsbuilder.Builder), + "amazon-instance": new(amazoninstancebuilder.Builder), + "azure-arm": new(azurearmbuilder.Builder), + "digitalocean": new(digitaloceanbuilder.Builder), + "docker": new(dockerbuilder.Builder), + "file": 
new(filebuilder.Builder), + "googlecompute": new(googlecomputebuilder.Builder), + "null": new(nullbuilder.Builder), + "openstack": new(openstackbuilder.Builder), + "parallels-iso": new(parallelsisobuilder.Builder), + "parallels-pvm": new(parallelspvmbuilder.Builder), + "qemu": new(qemubuilder.Builder), + "virtualbox-iso": new(virtualboxisobuilder.Builder), + "virtualbox-ovf": new(virtualboxovfbuilder.Builder), + "vmware-iso": new(vmwareisobuilder.Builder), + "vmware-vmx": new(vmwarevmxbuilder.Builder), +} + + +var Provisioners = map[string]packer.Provisioner{ + "ansible": new(ansibleprovisioner.Provisioner), + "ansible-local": new(ansiblelocalprovisioner.Provisioner), + "chef-client": new(chefclientprovisioner.Provisioner), + "chef-solo": new(chefsoloprovisioner.Provisioner), + "file": new(fileprovisioner.Provisioner), + "powershell": new(powershellprovisioner.Provisioner), + "puppet-masterless": new(puppetmasterlessprovisioner.Provisioner), + "puppet-server": new(puppetserverprovisioner.Provisioner), + "salt-masterless": new(saltmasterlessprovisioner.Provisioner), + "shell": new(shellprovisioner.Provisioner), + "shell-local": new(shelllocalprovisioner.Provisioner), + "windows-restart": new(windowsrestartprovisioner.Provisioner), + "windows-shell": new(windowsshellprovisioner.Provisioner), +} + + +var PostProcessors = map[string]packer.PostProcessor{ + "amazon-import": new(amazonimportpostprocessor.PostProcessor), + "artifice": new(artificepostprocessor.PostProcessor), + "atlas": new(atlaspostprocessor.PostProcessor), + "checksum": new(checksumpostprocessor.PostProcessor), + "compress": new(compresspostprocessor.PostProcessor), + "docker-import": new(dockerimportpostprocessor.PostProcessor), + "docker-push": new(dockerpushpostprocessor.PostProcessor), + "docker-save": new(dockersavepostprocessor.PostProcessor), + "docker-tag": new(dockertagpostprocessor.PostProcessor), + "googlecompute-export": new(googlecomputeexportpostprocessor.PostProcessor), + "manifest": 
new(manifestpostprocessor.PostProcessor), + "shell-local": new(shelllocalpostprocessor.PostProcessor), + "vagrant": new(vagrantpostprocessor.PostProcessor), + "vagrant-cloud": new(vagrantcloudpostprocessor.PostProcessor), + "vsphere": new(vspherepostprocessor.PostProcessor), +} + + +var pluginRegexp = regexp.MustCompile("packer-(builder|post-processor|provisioner)-(.+)") + +func (c *PluginCommand) Run(args []string) int { + // This is an internal call (users should not call this directly) so we're + // not going to do much input validation. If there's a problem we'll often + // just crash. Error handling should be added to facilitate debugging. + log.Printf("args: %#v", args) + if len(args) != 1 { + c.Ui.Error("Wrong number of args") + return 1 + } + + // Plugin will match something like "packer-builder-amazon-ebs" + parts := pluginRegexp.FindStringSubmatch(args[0]) + if len(parts) != 3 { + c.Ui.Error(fmt.Sprintf("Error parsing plugin argument [DEBUG]: %#v", parts)) + return 1 + } + pluginType := parts[1] // capture group 1 (builder|post-processor|provisioner) + pluginName := parts[2] // capture group 2 (.+) + + server, err := plugin.Server() + if err != nil { + c.Ui.Error(fmt.Sprintf("Error starting plugin server: %s", err)) + return 1 + } + + switch pluginType { + case "builder": + builder, found := Builders[pluginName] + if !found { + c.Ui.Error(fmt.Sprintf("Could not load builder: %s", pluginName)) + return 1 + } + server.RegisterBuilder(builder) + case "provisioner": + provisioner, found := Provisioners[pluginName] + if !found { + c.Ui.Error(fmt.Sprintf("Could not load provisioner: %s", pluginName)) + return 1 + } + server.RegisterProvisioner(provisioner) + case "post-processor": + postProcessor, found := PostProcessors[pluginName] + if !found { + c.Ui.Error(fmt.Sprintf("Could not load post-processor: %s", pluginName)) + return 1 + } + server.RegisterPostProcessor(postProcessor) + } + + server.Serve() + + return 0 +} + +func (*PluginCommand) Help() string { + 
helpText := ` +Usage: packer plugin PLUGIN + + Runs an internally-compiled version of a plugin from the packer binary. + + NOTE: this is an internal command and you should not call it yourself. +` + + return strings.TrimSpace(helpText) +} + +func (c *PluginCommand) Synopsis() string { + return "internal plugin command" +} From 1b9b37bdc1fb932942b132d076691f8bf186656a Mon Sep 17 00:00:00 2001 From: Scott Crunkleton Date: Mon, 15 Aug 2016 16:25:57 -0700 Subject: [PATCH 3/3] Added some error handling and logs for the GCE export startup script. --- .../googlecompute-export/startup.go | 52 +++++++++++++++---- 1 file changed, 42 insertions(+), 10 deletions(-) diff --git a/post-processor/googlecompute-export/startup.go b/post-processor/googlecompute-export/startup.go index 0a4ab5204..abd9f752f 100644 --- a/post-processor/googlecompute-export/startup.go +++ b/post-processor/googlecompute-export/startup.go @@ -11,6 +11,15 @@ DISKNAME=${NAME}-toexport PATHS=$(GetMetadata paths) ZONE=$(GetMetadata zone) +Exit () { + for i in ${PATHS}; do + LOGDEST="${i}.exporter.log" + echo "Uploading exporter log to ${LOGDEST}..." + gsutil -h "Content-Type:text/plain" cp /var/log/daemon.log ${LOGDEST} + done + exit $1 +} + echo "####### Export configuration #######" echo "Image name - ${IMAGENAME}" echo "Instance name - ${NAME}" @@ -20,25 +29,48 @@ echo "Export paths - ${PATHS}" echo "####################################" echo "Creating disk from image to be exported..." -gcloud compute disks create ${DISKNAME} --image ${IMAGENAME} --zone ${ZONE} +if ! gcloud compute disks create ${DISKNAME} --image ${IMAGENAME} --zone ${ZONE}; then + echo "Failed to create disk." + Exit 1 +fi + echo "Attaching disk..." -gcloud compute instances attach-disk ${NAME} --disk ${DISKNAME} --device-name toexport --zone ${ZONE} +if ! gcloud compute instances attach-disk ${NAME} --disk ${DISKNAME} --device-name toexport --zone ${ZONE}; then + echo "Failed to attach disk." + Exit 1 +fi echo "Dumping disk..." 
-dd if=/dev/disk/by-id/google-toexport of=disk.raw bs=4096 conv=sparse +if ! dd if=/dev/disk/by-id/google-toexport of=disk.raw bs=4096 conv=sparse; then + echo "Failed to dump disk to image." + Exit 1 +fi + echo "Compressing and tar'ing disk image..." -tar -czf root.tar.gz disk.raw +if ! tar -czf root.tar.gz disk.raw; then + echo "Failed to tar disk image." + Exit 1 +fi echo "Detaching disk..." -gcloud compute instances detach-disk ${NAME} --disk ${DISKNAME} --zone ${ZONE} +if ! gcloud compute instances detach-disk ${NAME} --disk ${DISKNAME} --zone ${ZONE}; then + echo "Failed to detach disk." +fi + +FAIL=0 echo "Deleting disk..." -gcloud compute disks delete ${DISKNAME} --zone ${ZONE} +if ! gcloud compute disks delete ${DISKNAME} --zone ${ZONE}; then + echo "Failed to delete disk." + FAIL=1 +fi for i in ${PATHS}; do echo "Uploading tar'ed disk image to ${i}..." - gsutil -o GSUtil:parallel_composite_upload_threshold=100M cp root.tar.gz ${i} - LOGDEST="${i}.exporter.log" - echo "Uploading exporter log to ${LOGDEST}..." - gsutil -h "Content-Type:text/plain" cp /var/log/daemon.log ${LOGDEST} + if ! gsutil -o GSUtil:parallel_composite_upload_threshold=100M cp root.tar.gz ${i}; then + echo "Failed to upload image to ${i}." + FAIL=1 + fi done + +Exit ${FAIL} `