Commit acc1aa06 authored by Chris Bednarski

Merge pull request #2556 from mitchellh/b-2414

Add interpolation for .BuildName in compress output filename
parents 211817c7 1c956ff4
@@ -55,9 +55,12 @@ func (p *PostProcessor) Configure(raws ...interface{}) error {
 		Interpolate:        true,
 		InterpolateContext: &p.config.ctx,
 		InterpolateFilter: &interpolate.RenderFilter{
-			Exclude: []string{},
+			Exclude: []string{"output"},
 		},
 	}, raws...)
+	if err != nil {
+		return err
+	}
 
 	errs := new(packer.MultiError)
@@ -67,16 +70,7 @@ func (p *PostProcessor) Configure(raws ...interface{}) error {
 	}
 
 	if p.config.OutputPath == "" {
-		p.config.OutputPath = "packer_{{.BuildName}}_{{.Provider}}"
-	}
-
-	if err = interpolate.Validate(p.config.OutputPath, &p.config.ctx); err != nil {
-		errs = packer.MultiErrorAppend(
-			errs, fmt.Errorf("Error parsing target template: %s", err))
-	}
-
-	templates := map[string]*string{
-		"output": &p.config.OutputPath,
+		p.config.OutputPath = "packer_{{.BuildName}}_{{.BuilderType}}"
 	}
 
 	if p.config.CompressionLevel > pgzip.BestCompression {
@@ -89,17 +83,9 @@ func (p *PostProcessor) Configure(raws ...interface{}) error {
 		p.config.CompressionLevel = pgzip.DefaultCompression
 	}
 
-	for key, ptr := range templates {
-		if *ptr == "" {
-			errs = packer.MultiErrorAppend(
-				errs, fmt.Errorf("%s must be set", key))
-		}
-
-		*ptr, err = interpolate.Render(p.config.OutputPath, &p.config.ctx)
-		if err != nil {
-			errs = packer.MultiErrorAppend(
-				errs, fmt.Errorf("Error processing %s: %s", key, err))
-		}
+	if err = interpolate.Validate(p.config.OutputPath, &p.config.ctx); err != nil {
+		errs = packer.MultiErrorAppend(
+			errs, fmt.Errorf("Error parsing target template: %s", err))
 	}
 
 	p.config.detectFromFilename()
@@ -113,7 +99,19 @@ func (p *PostProcessor) Configure(raws ...interface{}) error {
 func (p *PostProcessor) PostProcess(ui packer.Ui, artifact packer.Artifact) (packer.Artifact, bool, error) {
-	target := p.config.OutputPath
+	// These are extra variables that will be made available for interpolation.
+	p.config.ctx.Data = map[string]string{
+		"BuildName":   p.config.PackerBuildName,
+		"BuilderType": p.config.PackerBuilderType,
+	}
+
+	target, err := interpolate.Render(p.config.OutputPath, &p.config.ctx)
+	if err != nil {
+		return nil, false, fmt.Errorf("Error interpolating output value: %s", err)
+	} else {
+		fmt.Println(target)
+	}
+
 	keep := p.config.KeepInputArtifact
 	newArtifact := &Artifact{Path: target}
...
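For orientation, the rendering that PostProcess now performs is ordinary Go template expansion. The sketch below is not Packer's interpolate package; it is a minimal, self-contained approximation using the standard text/template library, and the helper name renderOutputPath is illustrative only.

```go
package main

import (
	"bytes"
	"fmt"
	"text/template"
)

// renderOutputPath is an illustrative stand-in for interpolate.Render: it
// expands {{.BuildName}} and {{.BuilderType}} in an output-path pattern
// against per-build values, mirroring the ctx.Data map set in PostProcess.
func renderOutputPath(pattern, buildName, builderType string) (string, error) {
	tmpl, err := template.New("output").Parse(pattern)
	if err != nil {
		return "", fmt.Errorf("Error parsing target template: %s", err)
	}

	data := map[string]string{
		"BuildName":   buildName,
		"BuilderType": builderType,
	}

	var buf bytes.Buffer
	if err := tmpl.Execute(&buf, data); err != nil {
		return "", fmt.Errorf("Error interpolating output value: %s", err)
	}
	return buf.String(), nil
}

func main() {
	// With the new default pattern and the values used by the test below,
	// this prints "packer_vanilla_file".
	target, err := renderOutputPath("packer_{{.BuildName}}_{{.BuilderType}}", "vanilla", "file")
	if err != nil {
		panic(err)
	}
	fmt.Println(target)
}
```

The counterpart to this is the `Exclude: []string{"output"}` filter added in Configure: by skipping `output` during config decoding, the template string survives untouched until PostProcess, where the build name and builder type are actually known.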
@@ -150,6 +150,37 @@ func TestCompressOptions(t *testing.T) {
 	}
 }
 
+func TestCompressInterpolation(t *testing.T) {
+	const config = `
+	{
+	    "post-processors": [
+	        {
+	            "type": "compress",
+	            "output": "{{ build_name}}-{{ .BuildName }}-{{.BuilderType}}.gz"
+	        }
+	    ]
+	}
+	`
+
+	artifact := testArchive(t, config)
+	defer artifact.Destroy()
+
+	// You can interpolate using the .BuildName variable or build_name global
+	// function. We'll check both.
+	filename := "chocolate-vanilla-file.gz"
+	archive, err := os.Open(filename)
+	if err != nil {
+		t.Fatalf("Unable to read %s: %s", filename, err)
+	}
+
+	gzipReader, _ := gzip.NewReader(archive)
+	data, _ := ioutil.ReadAll(gzipReader)
+
+	if string(data) != expectedFileContents {
+		t.Errorf("Expected:\n%s\nFound:\n%s\n", expectedFileContents, data)
+	}
+}
+
 // Test Helpers
 
 func setup(t *testing.T) (packer.Ui, packer.Artifact, error) {
@@ -201,6 +232,13 @@ func testArchive(t *testing.T, config string) packer.Artifact {
 	compressor := PostProcessor{}
 	compressor.Configure(tpl.PostProcessors[0][0].Config)
+
+	// I get the feeling these should be automatically available somewhere, but
+	// some of the post-processors construct this manually.
+	compressor.config.ctx.BuildName = "chocolate"
+	compressor.config.PackerBuildName = "vanilla"
+	compressor.config.PackerBuilderType = "file"
+
 	artifactOut, _, err := compressor.PostProcess(ui, artifact)
 	if err != nil {
 		t.Fatalf("Failed to compress artifact: %s", err)
...
@@ -15,10 +15,11 @@ VMware or VirtualBox) and compresses the artifact into a single archive.
 
 ## Configuration
 
-### Required:
+### Optional:
 
-You must specify the output filename. The archive format is derived from the
-filename.
+By default, packer will build archives in `.tar.gz` format with the following
+filename: `packer_{{.BuildName}}_{{.BuilderType}}`. If you want to change this
+you will need to specify the `output` option.
 
 -   `output` (string) - The path to save the compressed archive. The archive
     format is inferred from the filename. E.g. `.tar.gz` will be a
@@ -26,13 +27,9 @@ filename.
     detected packer defaults to `.tar.gz` behavior but will not change
     the filename.
 
-If you are executing multiple builders in parallel you should make sure `output`
-is unique for each one. For example `packer_{{.BuildName}}_{{.Provider}}.zip`.
-
-### Optional:
-
-If you want more control over how the archive is created you can specify the
-following settings:
+You can use `{{.BuildName}}` and `{{.BuilderType}}` in your output path.
+If you are executing multiple builders in parallel you should make sure
+`output` is unique for each one. For example `packer_{{.BuildName}}.zip`.
 
 -   `compression_level` (integer) - Specify the compression level, for
     algorithms that support it, from 1 through 9 inclusive. Typically higher
@@ -61,14 +58,14 @@ configuration:
 ``` {.json}
 {
     "type": "compress",
-    "output": "archive.zip"
+    "output": "{{.BuildName}}_bundle.zip"
 }
 ```
 
 ``` {.json}
 {
     "type": "compress",
-    "output": "archive.gz",
+    "output": "log_{{.BuildName}}.gz",
     "compression": 9
 }
 ```