Commit 1b6d9b85 authored by Chris Bednarski

Merge pull request #2571 from mitchellh/artifice

Artifice post-processor
parents 100d2b11 16d7e754
package main

import (
	"github.com/mitchellh/packer/packer/plugin"
	"github.com/mitchellh/packer/post-processor/artifice"
)

func main() {
	server, err := plugin.Server()
	if err != nil {
		panic(err)
	}
	server.RegisterPostProcessor(new(artifice.PostProcessor))
	server.Serve()
}
package artifice

import (
	"fmt"
	"os"
	"strings"
)

const BuilderId = "packer.post-processor.artifice"

// Artifact wraps a list of files that already exist on the local disk.
type Artifact struct {
	files []string
}

// NewArtifact verifies that every listed file exists and returns an
// Artifact wrapping those files.
func NewArtifact(files []string) (*Artifact, error) {
	for _, f := range files {
		if _, err := os.Stat(f); err != nil {
			return nil, err
		}
	}
	artifact := &Artifact{
		files: files,
	}
	return artifact, nil
}

func (a *Artifact) BuilderId() string {
	return BuilderId
}

func (a *Artifact) Files() []string {
	return a.files
}

func (a *Artifact) Id() string {
	return ""
}

func (a *Artifact) String() string {
	files := strings.Join(a.files, ", ")
	return fmt.Sprintf("Created artifact from files: %s", files)
}

func (a *Artifact) State(name string) interface{} {
	return nil
}

// Destroy removes the artifact's files from disk.
func (a *Artifact) Destroy() error {
	for _, f := range a.files {
		err := os.RemoveAll(f)
		if err != nil {
			return err
		}
	}
	return nil
}
package artifice

import (
	"fmt"
	"strings"

	"github.com/mitchellh/packer/common"
	"github.com/mitchellh/packer/helper/config"
	"github.com/mitchellh/packer/packer"
	"github.com/mitchellh/packer/template/interpolate"
)

// The artifice post-processor allows you to specify arbitrary files as
// artifacts. These will override any other artifacts created by the builder.
// This allows you to use a builder and provisioner to create some file, such
// as a compiled binary or tarball, extract it from the builder (VM or
// container), and then save that binary or tarball while throwing away the
// builder.
type Config struct {
	common.PackerConfig `mapstructure:",squash"`

	Files []string `mapstructure:"files"`
	Keep  bool     `mapstructure:"keep_input_artifact"`

	ctx interpolate.Context
}

type PostProcessor struct {
	config Config
}

func (p *PostProcessor) Configure(raws ...interface{}) error {
	err := config.Decode(&p.config, &config.DecodeOpts{
		Interpolate:        true,
		InterpolateContext: &p.config.ctx,
		InterpolateFilter: &interpolate.RenderFilter{
			Exclude: []string{},
		},
	}, raws...)
	if err != nil {
		return err
	}

	if len(p.config.Files) == 0 {
		return fmt.Errorf("No files specified in artifice configuration")
	}

	return nil
}

func (p *PostProcessor) PostProcess(ui packer.Ui, artifact packer.Artifact) (packer.Artifact, bool, error) {
	if len(artifact.Files()) > 0 {
		ui.Say(fmt.Sprintf("Discarding artifact files: %s", strings.Join(artifact.Files(), ", ")))
	}

	// Replace the upstream artifact with one built from the configured files.
	// Check the error before using the new artifact so a missing file does
	// not cause a nil dereference.
	artifact, err := NewArtifact(p.config.Files)
	if err != nil {
		return nil, false, err
	}

	ui.Say(fmt.Sprintf("Using these artifact files: %s", strings.Join(artifact.Files(), ", ")))
	return artifact, true, nil
}
---
description: |
    The artifice post-processor overrides the artifact list from an upstream
    builder or post-processor, letting you treat arbitrary files, such as a
    compiled binary or tarball extracted from the build, as the build's artifacts.
layout: docs
page_title: 'Artifice Post-Processor'
...

# Artifice Post-Processor

~> This is a beta feature, and may change significantly before it is
finalized. Please open a [GitHub issue to provide
feedback](https://github.com/mitchellh/packer/issues).

Type: `artifice`

The artifice post-processor overrides the artifact list from an upstream builder
or post-processor. All downstream post-processors will see the new artifacts you
specify. The primary use case is to build artifacts inside a Packer builder (for
example, spinning up an EC2 instance to build a Docker container) and then
extracting the Docker container and throwing away the EC2 instance.

After overriding the artifact with artifice, you can use it with other
post-processors like
[compress](https://packer.io/docs/post-processors/compress.html),
[docker-push](https://packer.io/docs/post-processors/docker-push.html),
[Atlas](https://packer.io/docs/post-processors/atlas.html), or a third-party
post-processor.

Artifice allows you to use the familiar Packer workflow to create a fresh,
stateless build environment for each build on the infrastructure of your
choosing. You can use this to build just about anything: buildpacks, containers,
jars, binaries, tarballs, MSI installers, and more.

## Workflow

Artifice helps you tie together a few other Packer features:

- A builder, which spins up a VM (or container) to build your artifact
- A provisioner, which performs the steps to create your artifact
- A file provisioner, which downloads the artifact from the VM
- The artifice post-processor, which identifies which files have been
  downloaded from the VM
- Additional post-processors, which push the artifact to Atlas, Docker
  Hub, etc.

You will want to perform as much work as possible inside the VM. Ideally
the only other post-processor you need after artifice is one that uploads your
artifact to the appropriate repository.
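
As a rough sketch, these pieces map onto a template roughly as follows. The
builder type, commands, and file names here are placeholders for illustration
only, not a tested configuration; a complete, working example follows in the
next section.

``` {.javascript}
{
  "builders": [
    {
      "type": "docker",
      "image": "ubuntu:14.04",
      "export_path": "image.tar"
    }
  ],
  "provisioners": [
    {
      "type": "shell",
      "inline": ["echo 'build your artifact here' > /tmp/output.txt"]
    },
    {
      "type": "file",
      "source": "/tmp/output.txt",
      "destination": "output.txt",
      "direction": "download"
    }
  ],
  "post-processors": [
    [
      {
        "type": "artifice",
        "files": ["output.txt"]
      },
      {
        "type": "compress",
        "output": "output.tar.gz"
      }
    ]
  ]
}
```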

## Configuration

The configuration allows you to specify which files comprise your artifact.

### Required:

- `files` (array of strings) - A list of files that comprise your artifact.
  These files must exist on your local disk after the provisioning phase of
  Packer is complete. These will replace any of the builder's original
  artifacts (such as a VM snapshot).
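
A minimal post-processor entry, using hypothetical file names, might look like
the following; downstream post-processors would be added to the same chain. See
the complete example below.

``` {.javascript}
"post-processors": [
  [
    {
      "type": "artifice",
      "files": ["dist/app", "dist/app.sha256"]
    }
  ]
]
```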

### Example Configuration

This minimal example:

1. Spins up a cloned VMware virtual machine
2. Installs a [consul](https://consul.io/) release
3. Downloads the consul binary
4. Packages it into a `.tar.gz` file
5. Uploads it to Atlas

VMX is a fast way to build and test locally, but you can easily substitute
another builder.

``` {.javascript}
{
  "builders": [
    {
      "type": "vmware-vmx",
      "source_path": "/opt/ubuntu-1404-vmware.vmx",
      "ssh_username": "vagrant",
      "ssh_password": "vagrant",
      "shutdown_command": "sudo shutdown -h now",
      "headless": "true",
      "skip_compaction": "true"
    }
  ],
  "provisioners": [
    {
      "type": "shell",
      "inline": [
        "sudo apt-get install -y python-pip",
        "sudo pip install ifs",
        "sudo ifs install consul --version=0.5.2"
      ]
    },
    {
      "type": "file",
      "source": "/usr/local/bin/consul",
      "destination": "consul",
      "direction": "download"
    }
  ],
  "post-processors": [
    [
      {
        "type": "artifice",
        "files": ["consul"]
      },
      {
        "type": "compress",
        "output": "consul-0.5.2.tar.gz"
      },
      {
        "type": "atlas",
        "artifact": "hashicorp/consul",
        "artifact_type": "archive"
      }
    ]
  ]
}
```

**Notice that there are two sets of square brackets in the post-processor
section.** This creates a post-processor chain, where the artifact produced by
each post-processor is passed to the next one in the chain. If you use only one
set of square brackets, the post-processors will run individually against the
build artifact (the VMX file in this case) and will not produce the desired
result.
"post-processors": [
[ <--- Start post-processor chain
{
"type": "artifice",
"files": ["consul"]
},
{
"type": "atlas",
...
}
], <--- End post-processor chain
{
"type":"compress" <-- Standalone post-processor
}
]

You can create multiple post-processor chains to handle multiple builders (for
example, building Linux and Windows binaries during the same build).
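
For example, a sketch of two chains scoped to different builders might use the
`only` option to restrict each chain to a single builder. The builder names and
file names here are hypothetical.

``` {.javascript}
"post-processors": [
  [
    {
      "type": "artifice",
      "only": ["amazon-ebs"],
      "files": ["dist/app-linux"]
    },
    {
      "type": "compress",
      "only": ["amazon-ebs"],
      "output": "app-linux.tar.gz"
    }
  ],
  [
    {
      "type": "artifice",
      "only": ["vmware-vmx"],
      "files": ["dist/app-windows.exe"]
    },
    {
      "type": "compress",
      "only": ["vmware-vmx"],
      "output": "app-windows.tar.gz"
    }
  ]
]
```
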
@@ -69,6 +69,7 @@
<li>
<h4>Post-Processors</h4>
</li>
<li><a href="/docs/post-processors/artifice.html">Artifice</a></li>
<li><a href="/docs/post-processors/atlas.html">Atlas</a></li>
<li><a href="/docs/post-processors/compress.html">compress</a></li>
<li><a href="/docs/post-processors/docker-import.html">docker-import</a></li>