Commit e9c2e506 authored by Matthew Holt

Merge branch 'master' into letsencryptfix

# Conflicts:
#	caddy/letsencrypt/letsencrypt.go
parents 55601d3e b6326d40
......@@ -2,7 +2,7 @@ language: go
go:
- 1.4.3
- 1.5.1
- 1.5.2
- tip
install:
......
## Contributing to Caddy
**[Join us on Slack](https://gophers.slack.com/messages/caddy/)** to chat with
other Caddy developers! ([Request an invite](http://bit.ly/go-slack-signup),
then join the #caddy channel.)
**[Join our dev chat on Gitter](https://gitter.im/mholt/caddy)** to chat with
other Caddy developers! (Dev chat only; try our
[support room](https://gitter.im/caddyserver/support) for help or
[general](https://gitter.im/caddyserver/general) for anything else.)
This project gladly accepts contributions and we encourage interested users to
get involved!
......@@ -11,24 +12,25 @@ get involved!
#### For small tweaks, bug fixes, and tests
Submit [pull requests](https://github.com/mholt/caddy/pulls) at any time.
Thank you for helping out in simple ways! Bug fixes should be under test to
assert correct behavior.
Bug fixes should be under test to assert correct behavior. Thank you for
helping out in simple ways!
#### Ideas, questions, bug reports
You should totally [open an issue](https://github.com/mholt/caddy/issues) with
your ideas, questions, and bug reports, if one does not already exist for it.
Bug reports should state expected behavior and contain clear instructions for
reproducing the problem.
Feel free to [open an issue](https://github.com/mholt/caddy/issues) with your
ideas, questions, and bug reports, if one does not already exist for it. Bug
reports should state expected behavior and contain clear instructions for
isolating and reproducing the problem.
See [How to Report Bugs Effectively](http://www.chiark.greenend.org.uk/~sgtatham/bugs.html).
#### New features
Before submitting a pull request, please open an issue first to discuss it and
claim it. This prevents overlapping efforts and keeps the project in-line with
its goals. If you prefer to discuss the feature privately, you can reach other
developers on Slack or you may email me directly. (My email address is below.)
developers on Gitter or you may email me directly. (My email address is below.)
And don't forget to write tests for new features!
......
[![Caddy](https://caddyserver.com/resources/images/caddy-boxed.png)](https://caddyserver.com)
[![Dev Chat](https://img.shields.io/badge/dev%20chat-gitter-ff69b4.svg?style=flat-square&label=dev+chat&color=ff69b4)](https://gitter.im/mholt/caddy)
[![Documentation](https://img.shields.io/badge/godoc-reference-blue.svg?style=flat-square)](https://godoc.org/github.com/mholt/caddy)
[![Linux Build Status](https://img.shields.io/travis/mholt/caddy.svg?style=flat-square&label=linux+build)](https://travis-ci.org/mholt/caddy)
[![Windows Build Status](https://img.shields.io/appveyor/ci/mholt/caddy.svg?style=flat-square&label=windows+build)](https://ci.appveyor.com/project/mholt/caddy)
......@@ -95,7 +96,7 @@ You may also be interested in the [developer guide]
## Running from Source
Note: You will need **[Go 1.4](https://golang.org/dl)** or a later version.
Note: You will need **[Go 1.4](https://golang.org/dl/)** or a later version.
1. `$ go get github.com/mholt/caddy`
2. `cd` into your website's directory
......@@ -120,11 +121,11 @@ ports < 1024 like 80 and 443.
Caddy is available as a Docker container from any of these sources:
- [abiosoft/caddy](https://registry.hub.docker.com/u/abiosoft/caddy/)
- [darron/caddy](https://registry.hub.docker.com/u/darron/caddy/)
- [joshix/caddy](https://registry.hub.docker.com/u/joshix/caddy/)
- [jumanjiman/caddy](https://registry.hub.docker.com/u/jumanjiman/caddy/)
- [zenithar/nano-caddy](https://registry.hub.docker.com/u/zenithar/nano-caddy/)
- [abiosoft/caddy](https://hub.docker.com/r/abiosoft/caddy/)
- [darron/caddy](https://hub.docker.com/r/darron/caddy/)
- [joshix/caddy](https://hub.docker.com/r/joshix/caddy/)
- [jumanjiman/caddy](https://hub.docker.com/r/jumanjiman/caddy/)
- [zenithar/nano-caddy](https://hub.docker.com/r/zenithar/nano-caddy/)
......@@ -139,9 +140,10 @@ packages that each Caddy package imports.
## Contributing
**[Join us on Slack](https://gophers.slack.com/messages/caddy/)** to chat with
other Caddy developers! ([Request an invite](http://bit.ly/go-slack-signup),
then join the #caddy channel.)
**[Join our dev chat on Gitter](https://gitter.im/mholt/caddy)** to chat with
other Caddy developers! (Dev chat only; try our
[support room](https://gitter.im/caddyserver/support) for help or
[general](https://gitter.im/caddyserver/general) for anything else.)
This project would not be what it is without your help. Please see the
[contributing guidelines](https://github.com/mholt/caddy/blob/master/CONTRIBUTING.md)
......
......@@ -49,6 +49,7 @@ func ObtainCertsAndConfigure(configs []server.Config, optPort string) ([]server.
// obtain certificates for configs that need one, and reconfigure each
// config to use the certificates
finishedHosts := make(map[string]struct{})
for leEmail, cfgIndexes := range groupedConfigs {
// make client to service this email address with CA server
client, err := newClientPort(leEmail, optPort)
......@@ -60,6 +61,12 @@ func ObtainCertsAndConfigure(configs []server.Config, optPort string) ([]server.
for _, idx := range cfgIndexes {
hostname := configs[idx].Host
// prevent duplicate efforts, for example, when host is served on multiple ports
if _, ok := finishedHosts[hostname]; ok {
continue
}
finishedHosts[hostname] = struct{}{}
Obtain:
certificate, failures := client.ObtainCertificate([]string{hostname}, true)
if len(failures) == 0 {
......
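The finishedHosts map above is a plain string set built from empty-struct values, so a host that appears in several configs (say, on ports 80 and 443) only triggers one certificate request. A minimal standalone sketch of the same dedupe pattern, with hypothetical hostnames rather than Caddy's config types:

package main

import "fmt"

func main() {
	// Hostnames as they might appear across several server configs.
	hosts := []string{"example.com", "example.com", "sub.example.com"}

	finished := make(map[string]struct{}) // empty-struct values: a cheap set
	for _, h := range hosts {
		if _, ok := finished[h]; ok {
			continue // already handled; skip the duplicate
		}
		finished[h] = struct{}{}
		fmt.Println("would obtain certificate for", h)
	}
}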
......@@ -144,7 +144,7 @@ func getEmail(cfg server.Config) string {
// Alas, we must bother the user and ask for an email address;
// if they proceed they also agree to the SA.
reader := bufio.NewReader(stdin)
fmt.Println("Your sites will be served over HTTPS automatically using Let's Encrypt.")
fmt.Println("\nYour sites will be served over HTTPS automatically using Let's Encrypt.")
fmt.Println("By continuing, you agree to the Let's Encrypt Subscriber Agreement at:")
fmt.Println(" " + saURL) // TODO: Show current SA link
fmt.Println("Please enter your email address so you can recover your account if needed.")
......
glob0.host0 {
dir2 arg1
}
glob0.host1 {
}
glob1.host0 {
dir1
dir2 arg1
}
glob2.host0 {
dir2 arg1
}
......@@ -177,19 +177,52 @@ func (p *parser) directives() error {
}
// doImport swaps out the import directive and its argument
// (a total of 2 tokens) with the tokens in the file specified.
// When the function returns, the cursor is on the token before
// where the import directive was. In other words, call Next()
// to access the first token that was imported.
// (a total of 2 tokens) with the tokens in the specified file
// or globbing pattern. When the function returns, the cursor
// is on the token before where the import directive was. In
// other words, call Next() to access the first token that was
// imported.
func (p *parser) doImport() error {
if !p.NextArg() {
return p.ArgErr()
}
importFile := p.Val()
importPattern := p.Val()
if p.NextArg() {
return p.Err("Import allows only one file to import")
return p.Err("Import allows only one expression, either file or glob pattern")
}
matches, err := filepath.Glob(importPattern)
if err != nil {
return p.Errf("Failed to use import pattern %s - %s", importPattern, err.Error())
}
if len(matches) == 0 {
return p.Errf("No files matching the import pattern %s", importPattern)
}
// Splice out the import directive and its argument (2 tokens total)
// and insert the imported tokens in their place.
tokensBefore := p.tokens[:p.cursor-1]
tokensAfter := p.tokens[p.cursor+1:]
// cursor was advanced one position to read filename; rewind it
p.cursor--
p.tokens = tokensBefore
for _, importFile := range matches {
if err := p.doSingleImport(importFile); err != nil {
return err
}
}
p.tokens = append(p.tokens, tokensAfter...)
return nil
}
// doSingleImport lexes the individual files matching the
// globbing pattern of the import directive.
func (p *parser) doSingleImport(importFile string) error {
file, err := os.Open(importFile)
if err != nil {
return p.Errf("Could not import %s - %v", importFile, err)
......@@ -204,10 +237,7 @@ func (p *parser) doImport() error {
// Splice out the import directive and its argument (2 tokens total)
// and insert the imported tokens in their place.
tokensBefore := p.tokens[:p.cursor-1]
tokensAfter := p.tokens[p.cursor+1:]
p.tokens = append(tokensBefore, append(importedTokens, tokensAfter...)...)
p.cursor-- // cursor was advanced one position to read the filename; rewind it
p.tokens = append(p.tokens, importedTokens...)
return nil
}
......
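The glob support above leans on filepath.Glob, which returns an empty slice and a nil error when nothing matches (it only errors on a malformed pattern), hence the explicit len(matches) == 0 check. A short sketch of that behavior, independent of the parser:

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// Glob only errors on a bad pattern; a pattern that matches
	// nothing yields an empty slice and a nil error.
	matches, err := filepath.Glob("import_glob*.txt")
	if err != nil {
		fmt.Println("bad pattern:", err)
		return
	}
	if len(matches) == 0 {
		fmt.Println("no files matched; the parser reports this as an error")
		return
	}
	for _, m := range matches {
		fmt.Println("would import tokens from", m)
	}
}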
......@@ -333,6 +333,13 @@ func TestParseAll(t *testing.T) {
[]address{{"http://host1.com", "http", "host1.com", "80"}, {"http://host2.com", "http", "host2.com", "80"}},
[]address{{"https://host3.com", "https", "host3.com", "443"}, {"https://host4.com", "https", "host4.com", "443"}},
}},
{`import import_glob*.txt`, false, [][]address{
[]address{{"glob0.host0", ""}},
[]address{{"glob0.host1", ""}},
[]address{{"glob1.host0", ""}},
[]address{{"glob2.host0", ""}},
}},
} {
p := testParser(test.input)
blocks, err := p.parseAll()
......
......@@ -27,9 +27,13 @@ func gzipParse(c *Controller) ([]gzip.Config, error) {
for c.Next() {
config := gzip.Config{}
// Request Filters
pathFilter := gzip.PathFilter{IgnoredPaths: make(gzip.Set)}
extFilter := gzip.ExtFilter{Exts: make(gzip.Set)}
// Response Filters
lengthFilter := gzip.LengthFilter(0)
// No extra args expected
if len(c.RemainingArgs()) > 0 {
return configs, c.ArgErr()
......@@ -68,24 +72,42 @@ func gzipParse(c *Controller) ([]gzip.Config, error) {
}
level, _ := strconv.Atoi(c.Val())
config.Level = level
case "min_length":
if !c.NextArg() {
return configs, c.ArgErr()
}
length, err := strconv.ParseInt(c.Val(), 10, 64)
if err != nil {
return configs, err
} else if length == 0 {
return configs, fmt.Errorf(`gzip: min_length must be greater than 0`)
}
lengthFilter = gzip.LengthFilter(length)
default:
return configs, c.ArgErr()
}
}
config.Filters = []gzip.Filter{}
// Request Filters
config.RequestFilters = []gzip.RequestFilter{}
// If ignored paths are specified, put in front to filter with path first
if len(pathFilter.IgnoredPaths) > 0 {
config.Filters = []gzip.Filter{pathFilter}
config.RequestFilters = []gzip.RequestFilter{pathFilter}
}
// Then, if extensions are specified, use those to filter.
// Otherwise, use default extensions filter.
if len(extFilter.Exts) > 0 {
config.Filters = append(config.Filters, extFilter)
config.RequestFilters = append(config.RequestFilters, extFilter)
} else {
config.Filters = append(config.Filters, gzip.DefaultExtFilter())
config.RequestFilters = append(config.RequestFilters, gzip.DefaultExtFilter())
}
// Response Filters
// If min_length is specified, use it.
if int64(lengthFilter) != 0 {
config.ResponseFilters = append(config.ResponseFilters, lengthFilter)
}
configs = append(configs, config)
......
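For reference, a hand-assembled sketch of roughly what gzipParse builds for a block containing "not /api", "ext .html", and "min_length 1000": the path filter runs before the extension filter, and the length filter is consulted against the response. This assumes the exported gzip types and import path used elsewhere in this diff; it is illustrative, not the setup code itself.

package main

import (
	"fmt"

	"github.com/mholt/caddy/middleware/gzip"
)

func main() {
	pathFilter := gzip.PathFilter{IgnoredPaths: make(gzip.Set)}
	pathFilter.IgnoredPaths.Add("/api")

	extFilter := gzip.ExtFilter{Exts: make(gzip.Set)}
	extFilter.Exts.Add(".html")

	cfg := gzip.Config{
		Level:           6,
		RequestFilters:  []gzip.RequestFilter{pathFilter, extFilter}, // path filter first
		ResponseFilters: []gzip.ResponseFilter{gzip.LengthFilter(1000)},
	}
	fmt.Printf("%+v\n", cfg)
}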
......@@ -73,6 +73,18 @@ func TestGzip(t *testing.T) {
level 1
}
`, false},
{`gzip { not /file
ext *
level 1
min_length ab
}
`, true},
{`gzip { not /file
ext *
level 1
min_length 1000
}
`, false},
}
for i, test := range tests {
c := NewTestController(test.input)
......
package setup
import (
"net/http"
"strings"
"github.com/mholt/caddy/middleware"
"github.com/mholt/caddy/middleware/rewrite"
)
......@@ -13,7 +16,11 @@ func Rewrite(c *Controller) (middleware.Middleware, error) {
}
return func(next middleware.Handler) middleware.Handler {
return rewrite.Rewrite{Next: next, Rules: rewrites}
return rewrite.Rewrite{
Next: next,
FileSys: http.Dir(c.Root),
Rules: rewrites,
}
}, nil
}
......@@ -30,6 +37,8 @@ func rewriteParse(c *Controller) ([]rewrite.Rule, error) {
args := c.RemainingArgs()
var ifs []rewrite.If
switch len(args) {
case 2:
rule = rewrite.NewSimpleRule(args[0], args[1])
......@@ -46,25 +55,36 @@ func rewriteParse(c *Controller) ([]rewrite.Rule, error) {
}
pattern = c.Val()
case "to":
if !c.NextArg() {
args1 := c.RemainingArgs()
if len(args1) == 0 {
return nil, c.ArgErr()
}
to = c.Val()
to = strings.Join(args1, " ")
case "ext":
args1 := c.RemainingArgs()
if len(args1) == 0 {
return nil, c.ArgErr()
}
ext = args1
case "if":
args1 := c.RemainingArgs()
if len(args1) != 3 {
return nil, c.ArgErr()
}
ifCond, err := rewrite.NewIf(args1[0], args1[1], args1[2])
if err != nil {
return nil, err
}
ifs = append(ifs, ifCond)
default:
return nil, c.ArgErr()
}
}
// ensure pattern and to are specified
if pattern == "" || to == "" {
// ensure to is specified
if to == "" {
return nil, c.ArgErr()
}
if rule, err = rewrite.NewRegexpRule(base, pattern, to, ext); err != nil {
if rule, err = rewrite.NewComplexRule(base, pattern, to, ext, ifs); err != nil {
return nil, err
}
regexpRules = append(regexpRules, rule)
......
package setup
import (
"testing"
"fmt"
"regexp"
"testing"
"github.com/mholt/caddy/middleware/rewrite"
)
......@@ -96,16 +95,16 @@ func TestRewriteParse(t *testing.T) {
}{
{`rewrite {
r .*
to /to
to /to /index.php?
}`, false, []rewrite.Rule{
&rewrite.RegexpRule{Base: "/", To: "/to", Regexp: regexp.MustCompile(".*")},
&rewrite.ComplexRule{Base: "/", To: "/to /index.php?", Regexp: regexp.MustCompile(".*")},
}},
{`rewrite {
regexp .*
to /to
ext / html txt
}`, false, []rewrite.Rule{
&rewrite.RegexpRule{Base: "/", To: "/to", Exts: []string{"/", "html", "txt"}, Regexp: regexp.MustCompile(".*")},
&rewrite.ComplexRule{Base: "/", To: "/to", Exts: []string{"/", "html", "txt"}, Regexp: regexp.MustCompile(".*")},
}},
{`rewrite /path {
r rr
......@@ -113,29 +112,30 @@ func TestRewriteParse(t *testing.T) {
}
rewrite / {
regexp [a-z]+
to /to
to /to /to2
}
`, false, []rewrite.Rule{
&rewrite.RegexpRule{Base: "/path", To: "/dest", Regexp: regexp.MustCompile("rr")},
&rewrite.RegexpRule{Base: "/", To: "/to", Regexp: regexp.MustCompile("[a-z]+")},
}},
{`rewrite {
to /to
}`, true, []rewrite.Rule{
&rewrite.RegexpRule{},
&rewrite.ComplexRule{Base: "/path", To: "/dest", Regexp: regexp.MustCompile("rr")},
&rewrite.ComplexRule{Base: "/", To: "/to /to2", Regexp: regexp.MustCompile("[a-z]+")},
}},
{`rewrite {
r .*
}`, true, []rewrite.Rule{
&rewrite.RegexpRule{},
&rewrite.ComplexRule{},
}},
{`rewrite {
}`, true, []rewrite.Rule{
&rewrite.RegexpRule{},
&rewrite.ComplexRule{},
}},
{`rewrite /`, true, []rewrite.Rule{
&rewrite.RegexpRule{},
&rewrite.ComplexRule{},
}},
{`rewrite {
to /to
if {path} is a
}`, false, []rewrite.Rule{
&rewrite.ComplexRule{Base: "/", To: "/to", Ifs: []rewrite.If{rewrite.If{A: "{path}", Operator: "is", B: "a"}}},
}},
}
......@@ -157,8 +157,8 @@ func TestRewriteParse(t *testing.T) {
}
for j, e := range test.expected {
actualRule := actual[j].(*rewrite.RegexpRule)
expectedRule := e.(*rewrite.RegexpRule)
actualRule := actual[j].(*rewrite.ComplexRule)
expectedRule := e.(*rewrite.ComplexRule)
if actualRule.Base != expectedRule.Base {
t.Errorf("Test %d, rule %d: Expected Base=%s, got %s",
......@@ -175,11 +175,19 @@ func TestRewriteParse(t *testing.T) {
i, j, expectedRule.To, actualRule.To)
}
if actualRule.Regexp != nil {
if actualRule.String() != expectedRule.String() {
t.Errorf("Test %d, rule %d: Expected Pattern=%s, got %s",
i, j, expectedRule.String(), actualRule.String())
}
}
if fmt.Sprint(actualRule.Ifs) != fmt.Sprint(expectedRule.Ifs) {
t.Errorf("Test %d, rule %d: Expected Pattern=%s, got %s",
i, j, fmt.Sprint(expectedRule.Ifs), fmt.Sprint(actualRule.Ifs))
}
}
}
}
......@@ -91,7 +91,7 @@ func TLS(c *Controller) (middleware.Middleware, error) {
func SetDefaultTLSParams(c *server.Config) {
// If no ciphers provided, use all that Caddy supports for the protocol
if len(c.TLS.Ciphers) == 0 {
c.TLS.Ciphers = supportedCiphers
c.TLS.Ciphers = defaultCiphers
}
// Not a cipher suite, but still important for mitigating protocol downgrade attacks
......@@ -160,3 +160,15 @@ var supportedCiphers = []uint16{
tls.TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA,
tls.TLS_RSA_WITH_3DES_EDE_CBC_SHA,
}
// List of all the ciphers we want to use by default
var defaultCiphers = []uint16{
tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
tls.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
tls.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
tls.TLS_RSA_WITH_AES_256_CBC_SHA,
tls.TLS_RSA_WITH_AES_128_CBC_SHA,
}
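The new defaultCiphers list seeds c.TLS.Ciphers only when the user configured none, and the test below additionally expects TLS_FALLBACK_SCSV at the end for downgrade protection. A plain crypto/tls sketch of using that list in a server config (not Caddy's own setup path; that the fallback value is appended last is inferred from the test, not shown in this hunk):

package main

import "crypto/tls"

func main() {
	defaultCiphers := []uint16{
		tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
		tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
		tls.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
		tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
		tls.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
		tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
		tls.TLS_RSA_WITH_AES_256_CBC_SHA,
		tls.TLS_RSA_WITH_AES_128_CBC_SHA,
	}

	cfg := &tls.Config{
		// TLS_FALLBACK_SCSV mitigates protocol downgrade attacks.
		CipherSuites:             append(defaultCiphers, tls.TLS_FALLBACK_SCSV),
		PreferServerCipherSuites: true,
	}
	_ = cfg // plug into tls.Listen / http.Server as needed
}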
......@@ -42,15 +42,13 @@ func TestTLSParseBasic(t *testing.T) {
tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
tls.TLS_RSA_WITH_AES_256_CBC_SHA,
tls.TLS_RSA_WITH_AES_128_CBC_SHA,
tls.TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA,
tls.TLS_RSA_WITH_3DES_EDE_CBC_SHA,
tls.TLS_FALLBACK_SCSV,
}
// Ensure count is correct (plus one for TLS_FALLBACK_SCSV)
if len(c.TLS.Ciphers) != len(supportedCiphers)+1 {
if len(c.TLS.Ciphers) != len(expectedCiphers) {
t.Errorf("Expected %v Ciphers (including TLS_FALLBACK_SCSV), got %v",
len(supportedCiphers)+1, len(c.TLS.Ciphers))
len(expectedCiphers), len(c.TLS.Ciphers))
}
// Ensure ordering is correct
......
CHANGES
0.8 beta
- Let's Encrypt (free, automatic, fully-managed HTTPS for your sites)
- Graceful restarts (for POSIX-compatible systems)
0.8.0 (December 4, 2015)
- HTTPS by default via Let's Encrypt (certs & keys are fully managed)
- Graceful restarts (on POSIX-compliant systems)
- Major internal refactoring to allow use of Caddy as library
- New directive 'mime' to customize Content-Type based on file extension
- New -accept flag to accept Let's Encrypt SA without prompt
......@@ -14,6 +14,7 @@ CHANGES
- New -grace flag to customize the graceful shutdown timeout
- New support for SIGHUP, SIGTERM, and SIGQUIT signals
- browse: Render filenames with multiple whitespace properly
- core: Use environment variables in Caddyfile
- markdown: Include Last-Modified header in response
- markdown: Render tables, strikethrough, and fenced code blocks
- proxy: Ability to exclude/ignore paths from proxying
......
CADDY 0.8 beta 4
CADDY 0.8
Website
https://caddyserver.com
......
......@@ -27,7 +27,7 @@ var (
const (
appName = "Caddy"
appVersion = "0.8 beta 4"
appVersion = "0.8"
)
func init() {
......
......@@ -115,7 +115,13 @@ func (h Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) (int, error)
return http.StatusBadGateway, err
}
return 0, nil
// FastCGI stderr outputs
if fcgi.stderr.Len() != 0 {
// Remove trailing newline, error logger already does this.
err = LogError(strings.TrimSuffix(fcgi.stderr.String(), "\n"))
}
return resp.StatusCode, err
}
}
......@@ -281,3 +287,11 @@ var (
// ErrIndexMissingSplit describes an index configuration error.
ErrIndexMissingSplit = errors.New("configured index file(s) must include split value")
)
// LogError is a non-fatal error that allows requests to go through.
type LogError string
// Error satisfies error interface.
func (l LogError) Error() string {
return string(l)
}
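LogError exists so a FastCGI app's stderr output can be surfaced without failing the request: the handler returns the upstream's status code together with a LogError, and a caller can tell it apart from a fatal error with a type assertion. A self-contained sketch of that distinction (LogError is redefined locally here for illustration; how Caddy's error middleware actually treats it is not part of this hunk):

package main

import (
	"fmt"
	"log"
)

// LogError mirrors the type added in this diff: a non-fatal error
// carrying a FastCGI app's stderr output.
type LogError string

func (l LogError) Error() string { return string(l) }

func handle() (int, error) {
	// Pretend the upstream wrote to stderr but still produced a response.
	return 200, LogError("PHP Notice: undefined variable $x")
}

func main() {
	status, err := handle()
	if logErr, ok := err.(LogError); ok {
		// Non-fatal: log it and keep serving the response.
		log.Println("[fastcgi stderr]", string(logErr))
	} else if err != nil {
		log.Fatal(err) // a real transport/protocol failure
	}
	fmt.Println("status:", status)
}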
......@@ -164,6 +164,7 @@ type FCGIClient struct {
rwc io.ReadWriteCloser
h header
buf bytes.Buffer
stderr bytes.Buffer
keepAlive bool
reqID uint16
}
......@@ -346,11 +347,23 @@ func (w *streamReader) Read(p []byte) (n int, err error) {
if len(p) > 0 {
if len(w.buf) == 0 {
// filter outputs for error log
for {
rec := &record{}
w.buf, err = rec.read(w.c.rwc)
var buf []byte
buf, err = rec.read(w.c.rwc)
if err != nil {
return
}
// standard error output
if rec.h.Type == Stderr {
w.c.stderr.Write(buf)
continue
}
w.buf = buf
break
}
}
n = len(p)
......@@ -387,6 +400,15 @@ func (c *FCGIClient) Do(p map[string]string, req io.Reader) (r io.Reader, err er
return
}
// clientCloser is an io.ReadCloser. It wraps an io.Reader with a Closer
// that closes the FCGIClient connection.
type clientCloser struct {
*FCGIClient
io.Reader
}
func (f clientCloser) Close() error { return f.rwc.Close() }
// Request returns an HTTP Response with Header and Body
// from the FastCGI responder.
func (c *FCGIClient) Request(p map[string]string, req io.Reader) (resp *http.Response, err error) {
......@@ -426,9 +448,9 @@ func (c *FCGIClient) Request(p map[string]string, req io.Reader) (resp *http.Res
resp.ContentLength, _ = strconv.ParseInt(resp.Header.Get("Content-Length"), 10, 64)
if chunked(resp.TransferEncoding) {
resp.Body = ioutil.NopCloser(httputil.NewChunkedReader(rb))
resp.Body = clientCloser{c, httputil.NewChunkedReader(rb)}
} else {
resp.Body = ioutil.NopCloser(rb)
resp.Body = clientCloser{c, ioutil.NopCloser(rb)}
}
return
}
......
......@@ -6,6 +6,7 @@ import (
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"net/http"
"strings"
......@@ -23,7 +24,8 @@ type Gzip struct {
// Config holds the configuration for Gzip middleware
type Config struct {
Filters []Filter // Filters to use
RequestFilters []RequestFilter
ResponseFilters []ResponseFilter
Level int // Compression level
}
......@@ -36,8 +38,8 @@ func (g Gzip) ServeHTTP(w http.ResponseWriter, r *http.Request) (int, error) {
outer:
for _, c := range g.Configs {
// Check filters to determine if gzipping is permitted for this request
for _, filter := range c.Filters {
// Check request filters to determine if gzipping is permitted for this request
for _, filter := range c.RequestFilters {
if !filter.ShouldCompress(r) {
continue outer
}
......@@ -46,18 +48,30 @@ outer:
// Delete this header so gzipping is not repeated later in the chain
r.Header.Del("Accept-Encoding")
w.Header().Set("Content-Encoding", "gzip")
w.Header().Set("Vary", "Accept-Encoding")
gzipWriter, err := newWriter(c, w)
// gzipWriter modifies the underlying writer at init;
// use a discard writer instead to leave the ResponseWriter
// in its original form.
gzipWriter, err := newWriter(c, ioutil.Discard)
if err != nil {
// should not happen
return http.StatusInternalServerError, err
}
defer gzipWriter.Close()
gz := gzipResponseWriter{Writer: gzipWriter, ResponseWriter: w}
gz := &gzipResponseWriter{Writer: gzipWriter, ResponseWriter: w}
var rw http.ResponseWriter
// if no response filter is used
if len(c.ResponseFilters) == 0 {
// replace discard writer with ResponseWriter
gzipWriter.Reset(w)
rw = gz
} else {
// wrap gzip writer with ResponseFilterWriter
rw = NewResponseFilterWriter(c.ResponseFilters, gz)
}
// Any response in forward middleware will now be compressed
status, err := g.Next.ServeHTTP(gz, r)
status, err := g.Next.ServeHTTP(rw, r)
// If there was an error that remained unhandled, we need
// to send something back before gzipWriter gets closed at
......@@ -78,7 +92,7 @@ outer:
// newWriter creates a new gzip Writer based on the compression level.
// If the level is valid (i.e. between 1 and 9), it uses the level.
// Otherwise, it uses default compression level.
func newWriter(c Config, w http.ResponseWriter) (*gzip.Writer, error) {
func newWriter(c Config, w io.Writer) (*gzip.Writer, error) {
if c.Level >= gzip.BestSpeed && c.Level <= gzip.BestCompression {
return gzip.NewWriterLevel(w, c.Level)
}
......@@ -90,19 +104,26 @@ func newWriter(c Config, w http.ResponseWriter) (*gzip.Writer, error) {
type gzipResponseWriter struct {
io.Writer
http.ResponseWriter
statusCodeWritten bool
}
// WriteHeader wraps the underlying WriteHeader method to prevent
// problems with conflicting headers from proxied backends. For
// example, a backend system that calculates Content-Length would
// be wrong because it doesn't know it's being gzipped.
func (w gzipResponseWriter) WriteHeader(code int) {
func (w *gzipResponseWriter) WriteHeader(code int) {
w.Header().Del("Content-Length")
w.Header().Set("Content-Encoding", "gzip")
w.Header().Add("Vary", "Accept-Encoding")
w.ResponseWriter.WriteHeader(code)
w.statusCodeWritten = true
}
// Write wraps the underlying Write method to do compression.
func (w gzipResponseWriter) Write(b []byte) (int, error) {
func (w *gzipResponseWriter) Write(b []byte) (int, error) {
if !w.statusCodeWritten {
w.WriteHeader(http.StatusOK)
}
if w.Header().Get("Content-Type") == "" {
w.Header().Set("Content-Type", http.DetectContentType(b))
}
......
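The key trick above is that a gzip.Writer can be created against ioutil.Discard and later pointed at the real destination with Reset, so nothing is written (and no headers are finalized) until a filter decides compression should happen. A standalone sketch of that Reset behavior:

package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"io/ioutil"
)

func main() {
	// Start the writer against a throwaway destination...
	gz, err := gzip.NewWriterLevel(ioutil.Discard, gzip.DefaultCompression)
	if err != nil {
		panic(err) // only possible with an invalid level
	}

	// ...then, once compression is known to be wanted, retarget it.
	var buf bytes.Buffer
	gz.Reset(&buf)

	gz.Write([]byte("hello, gzip"))
	gz.Close()

	fmt.Println("compressed bytes:", buf.Len())
}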
......@@ -21,7 +21,7 @@ func TestGzipHandler(t *testing.T) {
extFilter.Exts.Add(e)
}
gz := Gzip{Configs: []Config{
{Filters: []Filter{pathFilter, extFilter}},
{RequestFilters: []RequestFilter{pathFilter, extFilter}},
}}
w := httptest.NewRecorder()
......@@ -80,6 +80,7 @@ func TestGzipHandler(t *testing.T) {
func nextFunc(shouldGzip bool) middleware.Handler {
return middleware.HandlerFunc(func(w http.ResponseWriter, r *http.Request) (int, error) {
w.Write([]byte("test"))
if shouldGzip {
if r.Header.Get("Accept-Encoding") != "" {
return 0, fmt.Errorf("Accept-Encoding header not expected")
......@@ -90,7 +91,7 @@ func nextFunc(shouldGzip bool) middleware.Handler {
if w.Header().Get("Vary") != "Accept-Encoding" {
return 0, fmt.Errorf("Vary must be Accept-Encoding, found %v", r.Header.Get("Vary"))
}
if _, ok := w.(gzipResponseWriter); !ok {
if _, ok := w.(*gzipResponseWriter); !ok {
return 0, fmt.Errorf("ResponseWriter should be gzipResponseWriter, found %T", w)
}
return 0, nil
......@@ -101,7 +102,7 @@ func nextFunc(shouldGzip bool) middleware.Handler {
if w.Header().Get("Content-Encoding") == "gzip" {
return 0, fmt.Errorf("Content-Encoding must not be gzip, found gzip")
}
if _, ok := w.(gzipResponseWriter); ok {
if _, ok := w.(*gzipResponseWriter); ok {
return 0, fmt.Errorf("ResponseWriter should not be gzipResponseWriter")
}
return 0, nil
......
......@@ -7,8 +7,8 @@ import (
"github.com/mholt/caddy/middleware"
)
// Filter determines if a request should be gzipped.
type Filter interface {
// RequestFilter determines if a request should be gzipped.
type RequestFilter interface {
// ShouldCompress tells if gzip compression
// should be done on the request.
ShouldCompress(*http.Request) bool
......@@ -26,7 +26,7 @@ func DefaultExtFilter() ExtFilter {
return m
}
// ExtFilter is Filter for file name extensions.
// ExtFilter is RequestFilter for file name extensions.
type ExtFilter struct {
// Exts is the file name extensions to accept
Exts Set
......@@ -43,7 +43,7 @@ func (e ExtFilter) ShouldCompress(r *http.Request) bool {
return e.Exts.Contains(ExtWildCard) || e.Exts.Contains(ext)
}
// PathFilter is Filter for request path.
// PathFilter is RequestFilter for request path.
type PathFilter struct {
// IgnoredPaths is the paths to ignore
IgnoredPaths Set
......
......@@ -47,7 +47,7 @@ func TestSet(t *testing.T) {
}
func TestExtFilter(t *testing.T) {
var filter Filter = ExtFilter{make(Set)}
var filter RequestFilter = ExtFilter{make(Set)}
for _, e := range []string{".txt", ".html", ".css", ".md"} {
filter.(ExtFilter).Exts.Add(e)
}
......@@ -86,7 +86,7 @@ func TestPathFilter(t *testing.T) {
paths := []string{
"/a", "/b", "/c", "/de",
}
var filter Filter = PathFilter{make(Set)}
var filter RequestFilter = PathFilter{make(Set)}
for _, p := range paths {
filter.(PathFilter).IgnoredPaths.Add(p)
}
......
package gzip
import (
"compress/gzip"
"net/http"
"strconv"
)
// ResponseFilter determines if the response should be gzipped.
type ResponseFilter interface {
ShouldCompress(http.ResponseWriter) bool
}
// LengthFilter is ResponseFilter for minimum content length.
type LengthFilter int64
// ShouldCompress returns true if the content length is greater than
// or equal to the minimum length.
func (l LengthFilter) ShouldCompress(w http.ResponseWriter) bool {
contentLength := w.Header().Get("Content-Length")
length, err := strconv.ParseInt(contentLength, 10, 64)
if err != nil || length == 0 {
return false
}
return l != 0 && int64(l) <= length
}
// ResponseFilterWriter validates ResponseFilters. It writes
// gzip-compressed data if the ResponseFilters are satisfied,
// or uncompressed data otherwise.
type ResponseFilterWriter struct {
filters []ResponseFilter
shouldCompress bool
statusCodeWritten bool
*gzipResponseWriter
}
// NewResponseFilterWriter creates and initializes a new ResponseFilterWriter.
func NewResponseFilterWriter(filters []ResponseFilter, gz *gzipResponseWriter) *ResponseFilterWriter {
return &ResponseFilterWriter{filters: filters, gzipResponseWriter: gz}
}
// WriteHeader wraps the underlying WriteHeader method and compresses
// only if the filters are satisfied.
func (r *ResponseFilterWriter) WriteHeader(code int) {
// Determine if compression should be used or not.
r.shouldCompress = true
for _, filter := range r.filters {
if !filter.ShouldCompress(r) {
r.shouldCompress = false
break
}
}
if r.shouldCompress {
// replace discard writer with ResponseWriter
if gzWriter, ok := r.gzipResponseWriter.Writer.(*gzip.Writer); ok {
gzWriter.Reset(r.ResponseWriter)
}
// use gzip WriteHeader to include and delete
// necessary headers
r.gzipResponseWriter.WriteHeader(code)
} else {
r.ResponseWriter.WriteHeader(code)
}
r.statusCodeWritten = true
}
// Write wraps the underlying Write method and compresses if the filters
// are satisfied.
func (r *ResponseFilterWriter) Write(b []byte) (int, error) {
if !r.statusCodeWritten {
r.WriteHeader(http.StatusOK)
}
if r.shouldCompress {
return r.gzipResponseWriter.Write(b)
}
return r.ResponseWriter.Write(b)
}
package gzip
import (
"compress/gzip"
"fmt"
"net/http"
"net/http/httptest"
"testing"
"github.com/mholt/caddy/middleware"
)
func TestLengthFilter(t *testing.T) {
var filters []ResponseFilter = []ResponseFilter{
LengthFilter(100),
LengthFilter(1000),
LengthFilter(0),
}
var tests = []struct {
length int64
shouldCompress [3]bool
}{
{20, [3]bool{false, false, false}},
{50, [3]bool{false, false, false}},
{100, [3]bool{true, false, false}},
{500, [3]bool{true, false, false}},
{1000, [3]bool{true, true, false}},
{1500, [3]bool{true, true, false}},
}
for i, ts := range tests {
for j, filter := range filters {
r := httptest.NewRecorder()
r.Header().Set("Content-Length", fmt.Sprint(ts.length))
wWriter := NewResponseFilterWriter([]ResponseFilter{filter}, &gzipResponseWriter{gzip.NewWriter(r), r, false})
if filter.ShouldCompress(wWriter) != ts.shouldCompress[j] {
t.Errorf("Test %v: Expected %v found %v", i, ts.shouldCompress[j], filter.ShouldCompress(r))
}
}
}
}
func TestResponseFilterWriter(t *testing.T) {
tests := []struct {
body string
shouldCompress bool
}{
{"Hello\t\t\t\n", false},
{"Hello the \t\t\t world is\n\n\n great", true},
{"Hello \t\t\nfrom gzip", true},
{"Hello gzip\n", false},
}
filters := []ResponseFilter{
LengthFilter(15),
}
server := Gzip{Configs: []Config{
{ResponseFilters: filters},
}}
for i, ts := range tests {
server.Next = middleware.HandlerFunc(func(w http.ResponseWriter, r *http.Request) (int, error) {
w.Header().Set("Content-Length", fmt.Sprint(len(ts.body)))
w.Write([]byte(ts.body))
return 200, nil
})
r := urlRequest("/")
r.Header.Set("Accept-Encoding", "gzip")
w := httptest.NewRecorder()
server.ServeHTTP(w, r)
resp := w.Body.String()
if !ts.shouldCompress {
if resp != ts.body {
t.Errorf("Test %v: No compression expected, found %v", i, resp)
}
} else {
if resp == ts.body {
t.Errorf("Test %v: Compression expected, found %v", i, resp)
}
}
}
}
......@@ -5,6 +5,8 @@ import (
"encoding/hex"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"os"
"path/filepath"
"strings"
......@@ -103,8 +105,12 @@ func generateStaticHTML(md Markdown, cfg *Config) error {
reqPath = filepath.ToSlash(reqPath)
reqPath = "/" + reqPath
// Create an empty request and URL to satisfy template values.
req, _ := http.NewRequest("", "/", nil)
urlVar, _ := url.Parse("/")
// Generate the static file
ctx := middleware.Context{Root: md.FileSys}
ctx := middleware.Context{Root: md.FileSys, Req: req, URL: urlVar}
_, err = md.Process(cfg, reqPath, body, ctx)
if err != nil {
return err
......
package markdown
import (
"bufio"
"bytes"
"encoding/json"
"fmt"
"io"
"time"
"github.com/BurntSushi/toml"
"gopkg.in/yaml.v2"
"time"
)
// Metadata stores a page's metadata
......@@ -73,23 +71,20 @@ type JSONMetadataParser struct {
// Parse the metadata
func (j *JSONMetadataParser) Parse(b []byte) ([]byte, error) {
b, markdown, err := extractMetadata(j, b)
if err != nil {
return markdown, err
}
m := make(map[string]interface{})
// Read the preceding JSON object
decoder := json.NewDecoder(bytes.NewReader(b))
if err := decoder.Decode(&m); err != nil {
return b, err
return markdown, err
}
j.metadata.load(m)
// Retrieve remaining bytes after decoding
buf := make([]byte, len(b))
n, err := decoder.Buffered().Read(buf)
if err != nil {
return b, err
}
return buf[:n], nil
return markdown, nil
}
// Metadata returns parsed metadata. It should be called
......@@ -183,43 +178,29 @@ func (y *YAMLMetadataParser) Closing() []byte {
// It returns the metadata, the remaining bytes (markdown), and an error, if any.
func extractMetadata(parser MetadataParser, b []byte) (metadata []byte, markdown []byte, err error) {
b = bytes.TrimSpace(b)
reader := bufio.NewReader(bytes.NewBuffer(b))
// Read first line, which should indicate metadata or not
line, err := reader.ReadBytes('\n')
if err != nil || !bytes.Equal(bytes.TrimSpace(line), parser.Opening()) {
openingLine := parser.Opening()
closingLine := parser.Closing()
if !bytes.HasPrefix(b, openingLine) {
return nil, b, fmt.Errorf("first line missing expected metadata identifier")
}
// buffer for metadata contents
metaBuf := bytes.Buffer{}
// Read remaining lines until closing identifier is found
for {
line, err := reader.ReadBytes('\n')
if err != nil && err != io.EOF {
return nil, nil, err
}
// if closing identifier found, the remaining bytes must be markdown content
if bytes.Equal(bytes.TrimSpace(line), parser.Closing()) {
break
metaStart := len(openingLine)
if _, ok := parser.(*JSONMetadataParser); ok {
metaStart = 0
}
// if file ended, by this point no closing identifier was found
if err == io.EOF {
metaEnd := bytes.Index(b[metaStart:], closingLine)
if metaEnd == -1 {
return nil, nil, fmt.Errorf("metadata not closed ('%s' not found)", parser.Closing())
}
metaBuf.Write(line)
metaBuf.WriteString("\r\n")
metaEnd += metaStart
if _, ok := parser.(*JSONMetadataParser); ok {
metaEnd += len(closingLine)
}
// By now, the rest of the buffer contains markdown content
contentBuf := new(bytes.Buffer)
io.Copy(contentBuf, reader)
return metaBuf.Bytes(), contentBuf.Bytes(), nil
metadata = b[metaStart:metaEnd]
markdown = b[metaEnd:]
if _, ok := parser.(*JSONMetadataParser); !ok {
markdown = b[metaEnd+len(closingLine):]
}
return metadata, markdown, nil
}
// findParser finds the parser using line that contains opening identifier
......
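The rewritten extractMetadata drops the line-by-line reader in favor of byte searches: the opening identifier must prefix the (trimmed) input, the closing identifier is located with bytes.Index, and the JSON parser is special-cased so its braces stay part of the metadata. A small sketch of the byte-index approach, assuming the conventional "---" delimiters for YAML front matter:

package main

import (
	"bytes"
	"fmt"
)

func main() {
	b := []byte("---\ntitle: Hello\n---\n# Body\n\nSome markdown.")
	opening, closing := []byte("---"), []byte("---")

	if !bytes.HasPrefix(b, opening) {
		fmt.Println("no metadata")
		return
	}
	metaStart := len(opening)
	metaEnd := bytes.Index(b[metaStart:], closing)
	if metaEnd == -1 {
		fmt.Println("metadata not closed")
		return
	}
	metaEnd += metaStart

	metadata := b[metaStart:metaEnd]
	markdown := b[metaEnd+len(closing):]
	fmt.Printf("metadata: %q\nmarkdown: %q\n", metadata, markdown)
}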
......@@ -13,14 +13,14 @@ func TestWatcher(t *testing.T) {
interval := time.Millisecond * 100
i := 0
out := ""
syncChan := make(chan struct{})
stopChan := TickerFunc(interval, func() {
i++
out += fmt.Sprint(i)
syncChan <- struct{}{}
})
// wait little more because of concurrency
time.Sleep(interval * 9)
stopChan <- struct{}{}
if !strings.HasPrefix(out, expected) {
sleepInSync(8, syncChan, stopChan)
if out != expected {
t.Fatalf("Expected to have prefix %v, found %v", expected, out)
}
out = ""
......@@ -31,8 +31,9 @@ func TestWatcher(t *testing.T) {
mu.Lock()
out += fmt.Sprint(i)
mu.Unlock()
syncChan <- struct{}{}
})
time.Sleep(interval * 10)
sleepInSync(9, syncChan, stopChan)
mu.Lock()
res := out
mu.Unlock()
......@@ -40,3 +41,10 @@ func TestWatcher(t *testing.T) {
t.Fatalf("expected (%v) must be a proper prefix of out(%v).", expected, out)
}
}
func sleepInSync(times int, syncChan chan struct{}, stopChan chan struct{}) {
for i := 0; i < times; i++ {
<-syncChan
}
stopChan <- struct{}{}
}
......@@ -3,6 +3,7 @@ package middleware
import (
"net"
"net/http"
"net/url"
"path"
"strconv"
"strings"
......@@ -15,6 +16,7 @@ import (
// NewReplacer to get one of these.
type Replacer interface {
Replace(string) string
Set(key, value string)
}
type replacer struct {
......@@ -40,7 +42,9 @@ func NewReplacer(r *http.Request, rr *responseRecorder, emptyValue string) Repla
}(),
"{host}": r.Host,
"{path}": r.URL.Path,
"{path_escaped}": url.QueryEscape(r.URL.Path),
"{query}": r.URL.RawQuery,
"{query_escaped}": url.QueryEscape(r.URL.RawQuery),
"{fragment}": r.URL.Fragment,
"{proto}": r.Proto,
"{remote}": func() string {
......@@ -61,6 +65,7 @@ func NewReplacer(r *http.Request, rr *responseRecorder, emptyValue string) Repla
return port
}(),
"{uri}": r.URL.RequestURI(),
"{uri_escaped}": url.QueryEscape(r.URL.RequestURI()),
"{when}": func() string {
return time.Now().Format(timeFormat)
}(),
......@@ -81,9 +86,9 @@ func NewReplacer(r *http.Request, rr *responseRecorder, emptyValue string) Repla
rep.replacements["{latency}"] = time.Since(rr.start).String()
}
// Header placeholders
for header, val := range r.Header {
rep.replacements[headerReplacer+header+"}"] = strings.Join(val, ",")
// Header placeholders (case-insensitive)
for header, values := range r.Header {
rep.replacements[headerReplacer+strings.ToLower(header)+"}"] = strings.Join(values, ",")
}
return rep
......@@ -92,27 +97,39 @@ func NewReplacer(r *http.Request, rr *responseRecorder, emptyValue string) Repla
// Replace performs a replacement of values on s and returns
// the string with the replaced values.
func (r replacer) Replace(s string) string {
for placeholder, replacement := range r.replacements {
if replacement == "" {
replacement = r.emptyValue
}
s = strings.Replace(s, placeholder, replacement, -1)
}
// Replace any header placeholders that weren't found
// Header replacements - these are case-insensitive, so we can't just use strings.Replace()
for strings.Contains(s, headerReplacer) {
idxStart := strings.Index(s, headerReplacer)
endOffset := idxStart + len(headerReplacer)
idxEnd := strings.Index(s[endOffset:], "}")
if idxEnd > -1 {
s = s[:idxStart] + r.emptyValue + s[endOffset+idxEnd+1:]
placeholder := strings.ToLower(s[idxStart : endOffset+idxEnd+1])
replacement := r.replacements[placeholder]
if replacement == "" {
replacement = r.emptyValue
}
s = s[:idxStart] + replacement + s[endOffset+idxEnd+1:]
} else {
break
}
}
// Regular replacements - these are easier because they're case-sensitive
for placeholder, replacement := range r.replacements {
if replacement == "" {
replacement = r.emptyValue
}
s = strings.Replace(s, placeholder, replacement, -1)
}
return s
}
// Set sets key to value in the replacements map.
func (r replacer) Set(key, value string) {
r.replacements["{"+key+"}"] = value
}
const (
timeFormat = "02/Jan/2006:15:04:05 -0700"
headerReplacer = "{>"
......
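Two behaviors above are easy to miss: header placeholders such as {>Custom} are matched case-insensitively (keys are lower-cased when the map is built and again at lookup), and the new Set method lets later middleware inject values, which the rewrite rules further down use for regexp capture groups {1}, {2}, and so on. A hedged usage sketch against the middleware package as it stands after this change (import path as used in this repository):

package main

import (
	"fmt"
	"net/http"

	"github.com/mholt/caddy/middleware"
)

func main() {
	r, _ := http.NewRequest("GET", "http://localhost/foo?a=b", nil)
	r.Header.Set("Custom", "foobarbaz")

	repl := middleware.NewReplacer(r, nil, "-")

	// Built-in and header placeholders; header lookup is case-insensitive.
	fmt.Println(repl.Replace("path={path} query={query} hdr={>cUsToM}"))

	// Values injected later, e.g. regexp capture groups from a rewrite rule.
	repl.Set("1", "captured")
	fmt.Println(repl.Replace("first group: {1}"))
}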
......@@ -10,62 +10,115 @@ import (
func TestNewReplacer(t *testing.T) {
w := httptest.NewRecorder()
recordRequest := NewResponseRecorder(w)
userJSON := `{"username": "dennis"}`
reader := strings.NewReader(`{"username": "dennis"}`)
reader := strings.NewReader(userJSON) //Convert string to reader
request, err := http.NewRequest("POST", "http://caddyserver.com", reader) //Create request with JSON body
request, err := http.NewRequest("POST", "http://localhost", reader)
if err != nil {
t.Fatalf("Request Formation Failed \n")
t.Fatal("Request Formation Failed\n")
}
replaceValues := NewReplacer(request, recordRequest, "")
switch v := replaceValues.(type) {
case replacer:
if v.replacements["{host}"] != "caddyserver.com" {
t.Errorf("Expected host to be caddyserver.com")
if v.replacements["{host}"] != "localhost" {
t.Error("Expected host to be localhost")
}
if v.replacements["{method}"] != "POST" {
t.Errorf("Expected request method to be POST")
t.Error("Expected request method to be POST")
}
if v.replacements["{status}"] != "200" {
t.Errorf("Expected status to be 200")
t.Error("Expected status to be 200")
}
default:
t.Fatalf("Return Value from New Replacer expected pass type assertion into a replacer type \n")
t.Fatal("Return Value from New Replacer expected pass type assertion into a replacer type\n")
}
}
func TestReplace(t *testing.T) {
w := httptest.NewRecorder()
recordRequest := NewResponseRecorder(w)
userJSON := `{"username": "dennis"}`
reader := strings.NewReader(`{"username": "dennis"}`)
reader := strings.NewReader(userJSON) //Convert string to reader
request, err := http.NewRequest("POST", "http://caddyserver.com", reader) //Create request with JSON body
request, err := http.NewRequest("POST", "http://localhost", reader)
if err != nil {
t.Fatalf("Request Formation Failed \n")
t.Fatal("Request Formation Failed\n")
}
replaceValues := NewReplacer(request, recordRequest, "")
request.Header.Set("Custom", "foobarbaz")
request.Header.Set("ShorterVal", "1")
repl := NewReplacer(request, recordRequest, "-")
switch v := replaceValues.(type) {
case replacer:
if expected, actual := "This host is localhost.", repl.Replace("This host is {host}."); expected != actual {
t.Errorf("{host} replacement: expected '%s', got '%s'", expected, actual)
}
if expected, actual := "This request method is POST.", repl.Replace("This request method is {method}."); expected != actual {
t.Errorf("{method} replacement: expected '%s', got '%s'", expected, actual)
}
if expected, actual := "The response status is 200.", repl.Replace("The response status is {status}."); expected != actual {
t.Errorf("{status} replacement: expected '%s', got '%s'", expected, actual)
}
if expected, actual := "The Custom header is foobarbaz.", repl.Replace("The Custom header is {>Custom}."); expected != actual {
t.Errorf("{>Custom} replacement: expected '%s', got '%s'", expected, actual)
}
// Test header case-insensitivity
if expected, actual := "The cUsToM header is foobarbaz...", repl.Replace("The cUsToM header is {>cUsToM}..."); expected != actual {
t.Errorf("{>cUsToM} replacement: expected '%s', got '%s'", expected, actual)
}
if v.Replace("This host is {host}") != "This host is caddyserver.com" {
t.Errorf("Expected host replacement failed")
// Test non-existent header/value
if expected, actual := "The Non-Existent header is -.", repl.Replace("The Non-Existent header is {>Non-Existent}."); expected != actual {
t.Errorf("{>Non-Existent} replacement: expected '%s', got '%s'", expected, actual)
}
if v.Replace("This request method is {method}") != "This request method is POST" {
t.Errorf("Expected method replacement failed")
// Test bad placeholder
if expected, actual := "Bad {host placeholder...", repl.Replace("Bad {host placeholder..."); expected != actual {
t.Errorf("bad placeholder: expected '%s', got '%s'", expected, actual)
}
if v.Replace("The response status is {status}") != "The response status is 200" {
t.Errorf("Expected status replacement failed")
// Test bad header placeholder
if expected, actual := "Bad {>Custom placeholder", repl.Replace("Bad {>Custom placeholder"); expected != actual {
t.Errorf("bad header placeholder: expected '%s', got '%s'", expected, actual)
}
default:
t.Fatalf("Return Value from New Replacer expected pass type assertion into a replacer type \n")
// Test bad header placeholder with valid one later
if expected, actual := "Bad -", repl.Replace("Bad {>Custom placeholder {>ShorterVal}"); expected != actual {
t.Errorf("bad header placeholders: expected '%s', got '%s'", expected, actual)
}
// Test shorter header value with multiple placeholders
if expected, actual := "Short value 1 then foobarbaz.", repl.Replace("Short value {>ShorterVal} then {>Custom}."); expected != actual {
t.Errorf("short value: expected '%s', got '%s'", expected, actual)
}
}
func TestSet(t *testing.T) {
w := httptest.NewRecorder()
recordRequest := NewResponseRecorder(w)
reader := strings.NewReader(`{"username": "dennis"}`)
request, err := http.NewRequest("POST", "http://localhost", reader)
if err != nil {
t.Fatalf("Request Formation Failed \n")
}
repl := NewReplacer(request, recordRequest, "")
repl.Set("host", "getcaddy.com")
repl.Set("method", "GET")
repl.Set("status", "201")
repl.Set("variable", "value")
if repl.Replace("This host is {host}") != "This host is getcaddy.com" {
t.Error("Expected host replacement failed")
}
if repl.Replace("This request method is {method}") != "This request method is GET" {
t.Error("Expected method replacement failed")
}
if repl.Replace("The response status is {status}") != "The response status is 201" {
t.Error("Expected status replacement failed")
}
if repl.Replace("The value of variable is {variable}") != "The value of variable is value" {
t.Error("Expected variable replacement failed")
}
}
package rewrite
import (
"fmt"
"net/http"
"regexp"
"strings"
"github.com/mholt/caddy/middleware"
)
const (
// Operators
Is = "is"
Not = "not"
Has = "has"
StartsWith = "starts_with"
EndsWith = "ends_with"
Match = "match"
)
func operatorError(operator string) error {
return fmt.Errorf("Invalid operator %v", operator)
}
func newReplacer(r *http.Request) middleware.Replacer {
return middleware.NewReplacer(r, nil, "")
}
// condition is a rewrite condition.
type condition func(string, string) bool
var conditions = map[string]condition{
Is: isFunc,
Not: notFunc,
Has: hasFunc,
StartsWith: startsWithFunc,
EndsWith: endsWithFunc,
Match: matchFunc,
}
// isFunc is condition for Is operator.
// It checks for equality.
func isFunc(a, b string) bool {
return a == b
}
// notFunc is condition for Not operator.
// It checks for inequality.
func notFunc(a, b string) bool {
return a != b
}
// hasFunc is condition for Has operator.
// It checks if b is a substring of a.
func hasFunc(a, b string) bool {
return strings.Contains(a, b)
}
// startsWithFunc is condition for StartsWith operator.
// It checks if b is a prefix of a.
func startsWithFunc(a, b string) bool {
return strings.HasPrefix(a, b)
}
// endsWithFunc is condition for EndsWith operator.
// It checks if b is a suffix of a.
func endsWithFunc(a, b string) bool {
return strings.HasSuffix(a, b)
}
// matchFunc is condition for Match operator.
// It does regexp matching of a against the pattern in b.
func matchFunc(a, b string) bool {
matched, _ := regexp.MatchString(b, a)
return matched
}
// If is a statement for a rewrite condition.
type If struct {
A string
Operator string
B string
}
// True returns true if the condition is true and false otherwise.
// If r is not nil, it replaces placeholders before comparison.
func (i If) True(r *http.Request) bool {
if c, ok := conditions[i.Operator]; ok {
a, b := i.A, i.B
if r != nil {
replacer := newReplacer(r)
a = replacer.Replace(i.A)
b = replacer.Replace(i.B)
}
return c(a, b)
}
return false
}
// NewIf creates a new If condition.
func NewIf(a, operator, b string) (If, error) {
if _, ok := conditions[operator]; !ok {
return If{}, operatorError(operator)
}
return If{
A: a,
Operator: operator,
B: b,
}, nil
}
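A brief usage sketch of the condition type on its own: NewIf rejects unknown operators, and True substitutes request placeholders into both operands before comparing, so conditions can test values like {uri} at request time.

package main

import (
	"fmt"
	"net/http"

	"github.com/mholt/caddy/middleware/rewrite"
)

func main() {
	cond, err := rewrite.NewIf("{uri}", "starts_with", "/api")
	if err != nil {
		panic(err) // only happens for an unknown operator
	}

	r, _ := http.NewRequest("GET", "http://localhost/api/users?page=2", nil)

	// Placeholders in both operands are replaced from the request first.
	fmt.Println(cond.True(r)) // true: "/api/users?page=2" starts with "/api"
}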
package rewrite
import (
"net/http"
"strings"
"testing"
)
func TestConditions(t *testing.T) {
tests := []struct {
condition string
isTrue bool
}{
{"a is b", false},
{"a is a", true},
{"a not b", true},
{"a not a", false},
{"a has a", true},
{"a has b", false},
{"ba has b", true},
{"bab has b", true},
{"bab has bb", false},
{"bab starts_with bb", false},
{"bab starts_with ba", true},
{"bab starts_with bab", true},
{"bab ends_with bb", false},
{"bab ends_with bab", true},
{"bab ends_with ab", true},
{"a match *", false},
{"a match a", true},
{"a match .*", true},
{"a match a.*", true},
{"a match b.*", false},
{"ba match b.*", true},
{"ba match b[a-z]", true},
{"b0 match b[a-z]", false},
{"b0a match b[a-z]", false},
{"b0a match b[a-z]+", false},
{"b0a match b[a-z0-9]+", true},
}
for i, test := range tests {
str := strings.Fields(test.condition)
ifCond, err := NewIf(str[0], str[1], str[2])
if err != nil {
t.Error(err)
}
isTrue := ifCond.True(nil)
if isTrue != test.isTrue {
t.Errorf("Test %v: expected %v found %v", i, test.isTrue, isTrue)
}
}
invalidOperators := []string{"ss", "and", "if"}
for _, op := range invalidOperators {
_, err := NewIf("a", op, "b")
if err == nil {
t.Errorf("Invalid operator %v used, expected error.", op)
}
}
replaceTests := []struct {
url string
condition string
isTrue bool
}{
{"/home", "{uri} match /home", true},
{"/hom", "{uri} match /home", false},
{"/hom", "{uri} starts_with /home", false},
{"/hom", "{uri} starts_with /h", true},
{"/home/.hiddenfile", `{uri} match \/\.(.*)`, true},
{"/home/.hiddendir/afile", `{uri} match \/\.(.*)`, true},
}
for i, test := range replaceTests {
r, err := http.NewRequest("GET", test.url, nil)
if err != nil {
t.Error(err)
}
str := strings.Fields(test.condition)
ifCond, err := NewIf(str[0], str[1], str[2])
if err != nil {
t.Error(err)
}
isTrue := ifCond.True(r)
if isTrue != test.isTrue {
t.Errorf("Test %v: expected %v found %v", i, test.isTrue, isTrue)
}
}
}
......@@ -5,7 +5,6 @@ package rewrite
import (
"fmt"
"net/http"
"net/url"
"path"
"path/filepath"
"regexp"
......@@ -17,13 +16,14 @@ import (
// Rewrite is middleware to rewrite request locations internally before being handled.
type Rewrite struct {
Next middleware.Handler
FileSys http.FileSystem
Rules []Rule
}
// ServeHTTP implements the middleware.Handler interface.
func (rw Rewrite) ServeHTTP(w http.ResponseWriter, r *http.Request) (int, error) {
for _, rule := range rw.Rules {
if ok := rule.Rewrite(r); ok {
if ok := rule.Rewrite(rw.FileSys, r); ok {
break
}
}
......@@ -33,7 +33,7 @@ func (rw Rewrite) ServeHTTP(w http.ResponseWriter, r *http.Request) (int, error)
// Rule describes an internal location rewrite rule.
type Rule interface {
// Rewrite rewrites the internal location of the current request.
Rewrite(*http.Request) bool
Rewrite(http.FileSystem, *http.Request) bool
}
// SimpleRule is a simple rewrite rule.
......@@ -47,23 +47,20 @@ func NewSimpleRule(from, to string) SimpleRule {
}
// Rewrite rewrites the internal location of the current request.
func (s SimpleRule) Rewrite(r *http.Request) bool {
func (s SimpleRule) Rewrite(fs http.FileSystem, r *http.Request) bool {
if s.From == r.URL.Path {
// take note of this rewrite for internal use by fastcgi
// all we need is the URI, not full URL
r.Header.Set(headerFieldName, r.URL.RequestURI())
// replace variables
to := path.Clean(middleware.NewReplacer(r, nil, "").Replace(s.To))
r.URL.Path = to
return true
// attempt rewrite
return To(fs, r, s.To, newReplacer(r))
}
return false
}
// RegexpRule is a rewrite rule based on a regular expression
type RegexpRule struct {
// ComplexRule is a rewrite rule based on a regular expression
type ComplexRule struct {
// Path base. Request to this path and subpaths will be rewritten
Base string
......@@ -73,18 +70,26 @@ type RegexpRule struct {
// Extensions to filter by
Exts []string
// Rewrite conditions
Ifs []If
*regexp.Regexp
}
// NewRegexpRule creates a new RegexpRule. It returns an error if regexp
// pattern (pattern) or extensions (ext) are invalid.
func NewRegexpRule(base, pattern, to string, ext []string) (*RegexpRule, error) {
r, err := regexp.Compile(pattern)
func NewComplexRule(base, pattern, to string, ext []string, ifs []If) (*ComplexRule, error) {
// validate regexp if present
var r *regexp.Regexp
if pattern != "" {
var err error
r, err = regexp.Compile(pattern)
if err != nil {
return nil, err
}
}
// validate extensions
// validate extensions if present
for _, v := range ext {
if len(v) < 2 || (len(v) < 3 && v[0] == '!') {
// check if no extension is specified
......@@ -94,17 +99,19 @@ func NewRegexpRule(base, pattern, to string, ext []string) (*RegexpRule, error)
}
}
return &RegexpRule{
base,
to,
ext,
r,
return &ComplexRule{
Base: base,
To: to,
Exts: ext,
Ifs: ifs,
Regexp: r,
}, nil
}
// Rewrite rewrites the internal location of the current request.
func (r *RegexpRule) Rewrite(req *http.Request) bool {
func (r *ComplexRule) Rewrite(fs http.FileSystem, req *http.Request) bool {
rPath := req.URL.Path
replacer := newReplacer(req)
// validate base
if !middleware.Path(rPath).Matches(r.Base) {
......@@ -122,36 +129,35 @@ func (r *RegexpRule) Rewrite(req *http.Request) bool {
start--
}
// validate regexp
if !r.MatchString(rPath[start:]) {
// validate regexp if present
if r.Regexp != nil {
matches := r.FindStringSubmatch(rPath[start:])
switch len(matches) {
case 0:
// no match
return false
default:
// set regexp match variables {1}, {2} ...
for i := 1; i < len(matches); i++ {
replacer.Set(fmt.Sprint(i), matches[i])
}
}
}
// replace variables
to := path.Clean(middleware.NewReplacer(req, nil, "").Replace(r.To))
// validate resulting path
url, err := url.Parse(to)
if err != nil {
// validate rewrite conditions
for _, i := range r.Ifs {
if !i.True(req) {
return false
}
// take note of this rewrite for internal use by fastcgi
// all we need is the URI, not full URL
req.Header.Set(headerFieldName, req.URL.RequestURI())
// perform rewrite
req.URL.Path = url.Path
if url.RawQuery != "" {
// overwrite query string if present
req.URL.RawQuery = url.RawQuery
}
return true
// attempt rewrite
return To(fs, req, r.To, replacer)
}
// matchExt matches rPath against registered file extensions.
// Returns true if a match is found and false otherwise.
func (r *RegexpRule) matchExt(rPath string) bool {
func (r *ComplexRule) matchExt(rPath string) bool {
f := filepath.Base(rPath)
ext := path.Ext(f)
if ext == "" {
......
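A hedged sketch of driving a ComplexRule directly, mirroring one of the new test cases below: the regexp's capture groups become {1}, {2}, ... in the target, any If conditions gate the rewrite, and the FileSystem argument lets To() fall through a list of candidate targets.

package main

import (
	"fmt"
	"net/http"

	"github.com/mholt/caddy/middleware/rewrite"
)

func main() {
	// Under /reggrp, rewrite /ad/<digits><letters> to /a<digits>/<letters>.
	rule, err := rewrite.NewComplexRule(
		"/reggrp",              // base path
		`/ad/([0-9]+)([a-z]*)`, // pattern; groups become {1} and {2}
		"/a{1}/{2}",            // target
		nil,                    // no extension filter
		nil,                    // no If conditions
	)
	if err != nil {
		panic(err)
	}

	r, _ := http.NewRequest("GET", "http://localhost/reggrp/ad/124abc", nil)
	if rule.Rewrite(http.Dir("."), r) {
		fmt.Println("rewritten to", r.URL.Path) // /a124/abc, as in the test below
	}
}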
......@@ -4,9 +4,8 @@ import (
"fmt"
"net/http"
"net/http/httptest"
"testing"
"strings"
"testing"
"github.com/mholt/caddy/middleware"
)
......@@ -19,9 +18,10 @@ func TestRewrite(t *testing.T) {
NewSimpleRule("/a", "/b"),
NewSimpleRule("/b", "/b{uri}"),
},
FileSys: http.Dir("."),
}
regexpRules := [][]string{
regexps := [][]string{
{"/reg/", ".*", "/to", ""},
{"/r/", "[a-z]+", "/toaz", "!.html|"},
{"/url/", "a([a-z0-9]*)s([A-Z]{2})", "/to/{path}", ""},
......@@ -31,14 +31,17 @@ func TestRewrite(t *testing.T) {
{"/abcd/", "ab", "/a/{dir}/{file}", ".html|"},
{"/abcde/", "ab", "/a#{fragment}", ".html|"},
{"/ab/", `.*\.jpg`, "/ajpg", ""},
{"/reggrp", `/ad/([0-9]+)([a-z]*)`, "/a{1}/{2}", ""},
{"/reg2grp", `(.*)`, "/{1}", ""},
{"/reg3grp", `(.*)/(.*)/(.*)`, "/{1}{2}{3}", ""},
}
for _, regexpRule := range regexpRules {
for _, regexpRule := range regexps {
var ext []string
if s := strings.Split(regexpRule[3], "|"); len(s) > 1 {
ext = s[:len(s)-1]
}
rule, err := NewRegexpRule(regexpRule[0], regexpRule[1], regexpRule[2], ext)
rule, err := NewComplexRule(regexpRule[0], regexpRule[1], regexpRule[2], ext, nil)
if err != nil {
t.Fatal(err)
}
......@@ -81,6 +84,12 @@ func TestRewrite(t *testing.T) {
{"/abcde/abcde.html", "/a"},
{"/abcde/abcde.html#1234", "/a#1234"},
{"/ab/ab.jpg", "/ajpg"},
{"/reggrp/ad/12", "/a12"},
{"/reggrp/ad/124a", "/a124/a"},
{"/reggrp/ad/124abc", "/a124/abc"},
{"/reg2grp/ad/124abc", "/ad/124abc"},
{"/reg3grp/ad/aa/66", "/adaa66"},
{"/reg3grp/ad612/n1n/ab", "/ad612n1nab"},
}
for i, test := range tests {
......
empty
\ No newline at end of file
package rewrite
import (
"log"
"net/http"
"net/url"
"path"
"strings"
"github.com/mholt/caddy/middleware"
)
// To attempts rewrite. It attempts to rewrite to first valid path
// or the last path if none of the paths are valid.
// Returns true if rewrite is successful and false otherwise.
func To(fs http.FileSystem, r *http.Request, to string, replacer middleware.Replacer) bool {
tos := strings.Fields(to)
// try each rewrite path
t := ""
for _, v := range tos {
t = path.Clean(replacer.Replace(v))
// add trailing slash for directories, if present
if strings.HasSuffix(v, "/") && !strings.HasSuffix(t, "/") {
t += "/"
}
// validate file
if isValidFile(fs, t) {
break
}
}
// validate resulting path
u, err := url.Parse(t)
if err != nil {
// Let the user know we got here. Rewrite is expected but
// the resulting url is invalid.
log.Printf("[ERROR] rewrite: resulting path '%v' is invalid. error: %v", t, err)
return false
}
// take note of this rewrite for internal use by fastcgi
// all we need is the URI, not full URL
r.Header.Set(headerFieldName, r.URL.RequestURI())
// perform rewrite
r.URL.Path = u.Path
if u.RawQuery != "" {
// overwrite query string if present
r.URL.RawQuery = u.RawQuery
}
if u.Fragment != "" {
// overwrite fragment if present
r.URL.Fragment = u.Fragment
}
return true
}
// isValidFile checks if the file exists on the filesystem.
// If the file ends with `/`, it is validated as a directory.
func isValidFile(fs http.FileSystem, file string) bool {
if fs == nil {
return false
}
f, err := fs.Open(file)
if err != nil {
return false
}
defer f.Close()
stat, err := f.Stat()
if err != nil {
return false
}
// directory
if strings.HasSuffix(file, "/") {
return stat.IsDir()
}
// file
return !stat.IsDir()
}
package rewrite
import (
"net/http"
"net/url"
"testing"
)
func TestTo(t *testing.T) {
fs := http.Dir("testdata")
tests := []struct {
url string
to string
expected string
}{
{"/", "/somefiles", "/somefiles"},
{"/somefiles", "/somefiles /index.php{uri}", "/index.php/somefiles"},
{"/somefiles", "/testfile /index.php{uri}", "/testfile"},
{"/somefiles", "/testfile/ /index.php{uri}", "/index.php/somefiles"},
{"/somefiles", "/somefiles /index.php{uri}", "/index.php/somefiles"},
{"/?a=b", "/somefiles /index.php?{query}", "/index.php?a=b"},
{"/?a=b", "/testfile /index.php?{query}", "/testfile?a=b"},
{"/?a=b", "/testdir /index.php?{query}", "/index.php?a=b"},
{"/?a=b", "/testdir/ /index.php?{query}", "/testdir/?a=b"},
}
uri := func(r *url.URL) string {
uri := r.Path
if r.RawQuery != "" {
uri += "?" + r.RawQuery
}
return uri
}
for i, test := range tests {
r, err := http.NewRequest("GET", test.url, nil)
if err != nil {
t.Error(err)
}
To(fs, r, test.to, newReplacer(r))
if uri(r.URL) != test.expected {
t.Errorf("Test %v: expected %v found %v", i, test.expected, uri(r.URL))
}
}
}