Mirror of https://github.com/containous/traefik.git (synced 2025-09-06 05:44:21 +03:00)
Compare commits
8 Commits
3dfbdd8029
0a98005c1f
b5b8ef2e34
1476eba60a
06837e2508
1b42030d58
d7d7eb2b83
47bc2aaf62

CHANGELOG.md

@@ -1,5 +1,11 @@
 # Change Log
 
+## [v1.3.8](https://github.com/containous/traefik/tree/v1.3.8) (2017-09-07)
+[All Commits](https://github.com/containous/traefik/compare/v1.3.7...v1.3.8)
+
+**Bug fixes:**
+- **[middleware]** Compress and websocket ([#2079](https://github.com/containous/traefik/pull/2079) by [ldez](https://github.com/ldez))
+
 ## [v1.3.7](https://github.com/containous/traefik/tree/v1.3.7) (2017-08-25)
 [All Commits](https://github.com/containous/traefik/compare/v1.3.6...v1.3.7)

docs.Dockerfile (new file, 10 lines)

@@ -0,0 +1,10 @@
+FROM alpine:3.7
+
+ENV PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/root/.local/bin
+
+COPY requirements.txt /mkdocs/
+WORKDIR /mkdocs
+VOLUME /mkdocs
+
+RUN apk --no-cache --no-progress add py-pip \
+    && pip install --trusted-host pypi.python.org --user -r requirements.txt

glide.lock (generated, 6 changes)

@@ -1,4 +1,4 @@
-hash: 110ae989ba77357a6d7cc720f671765b06857cf447296a294a461acd2574a020
+hash: 132846decb297148a6365019a486fc3cbadc96db8a20393bbf81512c0efd6197
 updated: 2017-08-25T11:52:16.848940186+02:00
 imports:
 - name: cloud.google.com/go
@@ -320,7 +320,7 @@ imports:
 - name: github.com/mvdan/xurls
   version: db96455566f05ffe42bd6ac671f05eeb1152b45d
 - name: github.com/NYTimes/gziphandler
-  version: 316adfc72ed3b0157975917adf62ba2dc31842ce
+  version: 824b33f2a7457025697878c865c323f801118043
   repo: https://github.com/containous/gziphandler.git
   vcs: git
 - name: github.com/ogier/pflag
@@ -411,7 +411,7 @@ imports:
 - name: github.com/vdemeester/docker-events
   version: be74d4929ec1ad118df54349fda4b0cba60f849b
 - name: github.com/vulcand/oxy
-  version: 7baa97f97557ff96be2798972dc831c7ba0a46e7
+  version: 6c94d2888dba2b1a15a89b8a2ca515fc85e07477
   repo: https://github.com/containous/oxy.git
   vcs: git
   subpackages:

glide.yaml

@@ -8,7 +8,7 @@ import:
 - package: github.com/cenk/backoff
 - package: github.com/containous/flaeg
 - package: github.com/vulcand/oxy
-  version: 7baa97f97557ff96be2798972dc831c7ba0a46e7
+  version: 6c94d2888dba2b1a15a89b8a2ca515fc85e07477
   repo: https://github.com/containous/oxy.git
   vcs: git
   subpackages:
@@ -88,6 +88,7 @@ import:
 - package: github.com/abbot/go-http-auth
 - package: github.com/NYTimes/gziphandler
   repo: https://github.com/containous/gziphandler.git
+  version: ^v1002.0.0
   vcs: git
 - package: github.com/docker/leadership
 - package: github.com/satori/go.uuid

middlewares/compress.go

@@ -17,7 +17,10 @@ func (c *Compress) ServeHTTP(rw http.ResponseWriter, r *http.Request, next http.
 }
 
 func gzipHandler(h http.Handler) http.Handler {
-    wrapper, err := gziphandler.NewGzipHandler(gzip.DefaultCompression, gziphandler.DefaultMinSize, &gziphandler.GzipResponseWriterWrapper{})
+    wrapper, err := gziphandler.GzipHandlerWithOpts(
+        &gziphandler.GzipResponseWriterWrapper{},
+        gziphandler.CompressionLevel(gzip.DefaultCompression),
+        gziphandler.MinSize(gziphandler.DefaultMinSize))
     if err != nil {
         log.Error(err)
     }
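
A note for readers of this change: the option-based constructor used above (GzipHandlerWithOpts with CompressionLevel and MinSize, all from the containous fork vendored under the github.com/NYTimes/gziphandler import path, per glide.yaml) composes like ordinary net/http middleware. The sketch below is illustrative only and is not part of the diff; it assumes the fork is vendored exactly as shown above.

package main

import (
    "compress/gzip"
    "log"
    "net/http"

    "github.com/NYTimes/gziphandler" // the containous fork, per the glide.yaml override above
)

func main() {
    // Build the wrapping middleware with the option-based constructor used in
    // the diff above. It returns a func(http.Handler) http.Handler plus an error.
    wrapper, err := gziphandler.GzipHandlerWithOpts(
        &gziphandler.GzipResponseWriterWrapper{},
        gziphandler.CompressionLevel(gzip.DefaultCompression),
        gziphandler.MinSize(gziphandler.DefaultMinSize))
    if err != nil {
        log.Fatal(err)
    }

    hello := http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
        rw.Write([]byte("hello, compressed world"))
    })

    // The wrapper composes like any other middleware.
    log.Fatal(http.ListenAndServe(":8080", wrapper(hello)))
}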

middlewares/compress_test.go

@@ -10,6 +10,7 @@ import (
     "github.com/codegangsta/negroni"
     "github.com/containous/traefik/testhelpers"
     "github.com/stretchr/testify/assert"
+    "github.com/stretchr/testify/require"
 )
 
 const (
@@ -80,63 +81,114 @@ func TestShouldNotCompressWhenNoAcceptEncodingHeader(t *testing.T) {
     assert.EqualValues(t, rw.Body.Bytes(), fakeBody)
 }
 
-func TestIntegrationShouldNotCompressWhenContentAlreadyCompressed(t *testing.T) {
+func TestIntegrationShouldNotCompress(t *testing.T) {
     fakeCompressedBody := generateBytes(100000)
 
-    handler := func(rw http.ResponseWriter, r *http.Request) {
-        rw.Header().Add(contentEncodingHeader, gzipValue)
-        rw.Header().Add(varyHeader, acceptEncodingHeader)
-        rw.Write(fakeCompressedBody)
-    }
-
     comp := &Compress{}
 
-    negro := negroni.New(comp)
-    negro.UseHandlerFunc(handler)
-    ts := httptest.NewServer(negro)
-    defer ts.Close()
+    testCases := []struct {
+        name               string
+        handler            func(rw http.ResponseWriter, r *http.Request)
+        expectedStatusCode int
+    }{
+        {
+            name: "when content already compressed",
+            handler: func(rw http.ResponseWriter, r *http.Request) {
+                rw.Header().Add(contentEncodingHeader, gzipValue)
+                rw.Header().Add(varyHeader, acceptEncodingHeader)
+                rw.Write(fakeCompressedBody)
+            },
+            expectedStatusCode: http.StatusOK,
+        },
+        {
+            name: "when content already compressed and status code Created",
+            handler: func(rw http.ResponseWriter, r *http.Request) {
+                rw.Header().Add(contentEncodingHeader, gzipValue)
+                rw.Header().Add(varyHeader, acceptEncodingHeader)
+                rw.WriteHeader(http.StatusCreated)
+                rw.Write(fakeCompressedBody)
+            },
+            expectedStatusCode: http.StatusCreated,
+        },
+    }
 
-    client := &http.Client{}
-    req := testhelpers.MustNewRequest(http.MethodGet, ts.URL, nil)
-    req.Header.Add(acceptEncodingHeader, gzipValue)
+    for _, test := range testCases {
+        t.Run(test.name, func(t *testing.T) {
+            negro := negroni.New(comp)
+            negro.UseHandlerFunc(test.handler)
+            ts := httptest.NewServer(negro)
+            defer ts.Close()
 
-    resp, err := client.Do(req)
-    assert.NoError(t, err, "there should be no error")
+            req := testhelpers.MustNewRequest(http.MethodGet, ts.URL, nil)
+            req.Header.Add(acceptEncodingHeader, gzipValue)
 
-    assert.Equal(t, gzipValue, resp.Header.Get(contentEncodingHeader))
-    assert.Equal(t, acceptEncodingHeader, resp.Header.Get(varyHeader))
+            resp, err := http.DefaultClient.Do(req)
+            require.NoError(t, err)
 
-    body, err := ioutil.ReadAll(resp.Body)
-    assert.EqualValues(t, fakeCompressedBody, body)
+            assert.Equal(t, test.expectedStatusCode, resp.StatusCode)
+
+            assert.Equal(t, gzipValue, resp.Header.Get(contentEncodingHeader))
+            assert.Equal(t, acceptEncodingHeader, resp.Header.Get(varyHeader))
+
+            body, err := ioutil.ReadAll(resp.Body)
+            require.NoError(t, err)
+            assert.EqualValues(t, fakeCompressedBody, body)
+        })
+    }
 }
 
-func TestIntegrationShouldCompressWhenAcceptEncodingHeaderIsPresent(t *testing.T) {
+func TestIntegrationShouldCompress(t *testing.T) {
     fakeBody := generateBytes(100000)
 
-    handler := func(rw http.ResponseWriter, r *http.Request) {
-        rw.Write(fakeBody)
+    testCases := []struct {
+        name               string
+        handler            func(rw http.ResponseWriter, r *http.Request)
+        expectedStatusCode int
+    }{
+        {
+            name: "when AcceptEncoding header is present",
+            handler: func(rw http.ResponseWriter, r *http.Request) {
+                rw.Write(fakeBody)
+            },
+            expectedStatusCode: http.StatusOK,
+        },
+        {
+            name: "when AcceptEncoding header is present and status code Created",
+            handler: func(rw http.ResponseWriter, r *http.Request) {
+                rw.WriteHeader(http.StatusCreated)
+                rw.Write(fakeBody)
+            },
+            expectedStatusCode: http.StatusCreated,
+        },
     }
 
-    comp := &Compress{}
-
-    negro := negroni.New(comp)
-    negro.UseHandlerFunc(handler)
-    ts := httptest.NewServer(negro)
-    defer ts.Close()
+    for _, test := range testCases {
+        t.Run(test.name, func(t *testing.T) {
+            comp := &Compress{}
 
-    client := &http.Client{}
-    req := testhelpers.MustNewRequest(http.MethodGet, ts.URL, nil)
-    req.Header.Add(acceptEncodingHeader, gzipValue)
+            negro := negroni.New(comp)
+            negro.UseHandlerFunc(test.handler)
+            ts := httptest.NewServer(negro)
+            defer ts.Close()
 
-    resp, err := client.Do(req)
-    assert.NoError(t, err, "there should be no error")
+            req := testhelpers.MustNewRequest(http.MethodGet, ts.URL, nil)
+            req.Header.Add(acceptEncodingHeader, gzipValue)
 
-    assert.Equal(t, gzipValue, resp.Header.Get(contentEncodingHeader))
-    assert.Equal(t, acceptEncodingHeader, resp.Header.Get(varyHeader))
+            resp, err := http.DefaultClient.Do(req)
+            require.NoError(t, err)
 
-    body, err := ioutil.ReadAll(resp.Body)
-    if assert.ObjectsAreEqualValues(body, fakeBody) {
-        assert.Fail(t, "expected a compressed body", "got %v", body)
+            assert.Equal(t, test.expectedStatusCode, resp.StatusCode)
+
+            assert.Equal(t, gzipValue, resp.Header.Get(contentEncodingHeader))
+            assert.Equal(t, acceptEncodingHeader, resp.Header.Get(varyHeader))
+
+            body, err := ioutil.ReadAll(resp.Body)
+            require.NoError(t, err)
+            if assert.ObjectsAreEqualValues(body, fakeBody) {
+                assert.Fail(t, "expected a compressed body", "got %v", body)
+            }
+        })
     }
 }
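
The hunk above replaces two one-off integration tests with table-driven subtests and adds a status-code expectation per case. The same t.Run pattern, reduced to a stand-alone sketch against a made-up handler (nothing below is Traefik code; it only mirrors the structure of the new tests):

package example

import (
    "net/http"
    "net/http/httptest"
    "testing"
)

func TestStatusCodes(t *testing.T) {
    testCases := []struct {
        name               string
        handler            http.HandlerFunc
        expectedStatusCode int
    }{
        {
            name: "default status",
            handler: func(rw http.ResponseWriter, r *http.Request) {
                rw.Write([]byte("ok"))
            },
            expectedStatusCode: http.StatusOK,
        },
        {
            name: "explicit Created",
            handler: func(rw http.ResponseWriter, r *http.Request) {
                rw.WriteHeader(http.StatusCreated)
                rw.Write([]byte("ok"))
            },
            expectedStatusCode: http.StatusCreated,
        },
    }

    for _, test := range testCases {
        t.Run(test.name, func(t *testing.T) {
            // Each case gets its own server, mirroring the per-subtest setup above.
            ts := httptest.NewServer(test.handler)
            defer ts.Close()

            resp, err := http.Get(ts.URL)
            if err != nil {
                t.Fatal(err)
            }
            defer resp.Body.Close()

            if resp.StatusCode != test.expectedStatusCode {
                t.Errorf("got status %d, want %d", resp.StatusCode, test.expectedStatusCode)
            }
        })
    }
}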

requirements.txt

@@ -1,3 +1,3 @@
-mkdocs>=0.16.1
-pymdown-extensions>=1.4
-mkdocs-bootswatch>=0.4.0
+mkdocs>=0.16.1,<0.17.0
+pymdown-extensions==1.4
+mkdocs-bootswatch==0.4.0

Cross-compilation build script (1 of 2)

@@ -18,17 +18,19 @@ if [ -z "$DATE" ]; then
     DATE=$(date -u '+%Y-%m-%d_%I:%M:%S%p')
 fi
 
+echo "Building ${VERSION} ${CODENAME} ${DATE}"
+
 GIT_REPO_URL='github.com/containous/traefik/version'
 GO_BUILD_CMD="go build -ldflags"
-GO_BUILD_OPT="-s -w -X ${GIT_REPO_URL}.Version=$VERSION -X ${GIT_REPO_URL}.Codename=$CODENAME -X ${GIT_REPO_URL}.BuildDate=$DATE"
+GO_BUILD_OPT="-s -w -X ${GIT_REPO_URL}.Version=${VERSION} -X ${GIT_REPO_URL}.Codename=${CODENAME} -X ${GIT_REPO_URL}.BuildDate=${DATE}"
 
 # Build 386 amd64 binaries
 OS_PLATFORM_ARG=(linux windows darwin)
 OS_ARCH_ARG=(amd64)
 for OS in ${OS_PLATFORM_ARG[@]}; do
   for ARCH in ${OS_ARCH_ARG[@]}; do
-    echo "Building binary for $OS/$ARCH..."
-    GOARCH=$ARCH GOOS=$OS CGO_ENABLED=0 $GO_BUILD_CMD "$GO_BUILD_OPT" -o "dist/traefik_$OS-$ARCH" ./cmd/traefik/
+    echo "Building binary for ${OS}/${ARCH}..."
+    GOARCH=${ARCH} GOOS=${OS} CGO_ENABLED=0 ${GO_BUILD_CMD} "${GO_BUILD_OPT}" -o "dist/traefik_${OS}-${ARCH}" ./cmd/traefik/
   done
 done
@@ -38,7 +40,7 @@ OS_PLATFORM_ARG=(linux)
 OS_ARCH_ARG=(arm64)
 for OS in ${OS_PLATFORM_ARG[@]}; do
   for ARCH in ${OS_ARCH_ARG[@]}; do
-    echo "Building binary for $OS/$ARCH..."
-    GOARCH=$ARCH GOOS=$OS CGO_ENABLED=0 $GO_BUILD_CMD "$GO_BUILD_OPT" -o "dist/traefik_$OS-$ARCH" ./cmd/traefik/
+    echo "Building binary for ${OS}/${ARCH}..."
+    GOARCH=${ARCH} GOOS=${OS} CGO_ENABLED=0 ${GO_BUILD_CMD} "${GO_BUILD_OPT}" -o "dist/traefik_${OS}-${ARCH}" ./cmd/traefik/
   done
 done

Cross-compilation build script (2 of 2)

@@ -18,9 +18,11 @@ if [ -z "$DATE" ]; then
     DATE=$(date -u '+%Y-%m-%d_%I:%M:%S%p')
 fi
 
+echo "Building ${VERSION} ${CODENAME} ${DATE}"
+
 GIT_REPO_URL='github.com/containous/traefik/version'
 GO_BUILD_CMD="go build -ldflags"
-GO_BUILD_OPT="-s -w -X ${GIT_REPO_URL}.Version=$VERSION -X ${GIT_REPO_URL}.Codename=$CODENAME -X ${GIT_REPO_URL}.BuildDate=$DATE"
+GO_BUILD_OPT="-s -w -X ${GIT_REPO_URL}.Version=${VERSION} -X ${GIT_REPO_URL}.Codename=${CODENAME} -X ${GIT_REPO_URL}.BuildDate=${DATE}"
 
 # Build arm binaries
 OS_PLATFORM_ARG=(linux windows darwin)
@@ -28,7 +30,7 @@ OS_ARCH_ARG=(386)
 for OS in ${OS_PLATFORM_ARG[@]}; do
   for ARCH in ${OS_ARCH_ARG[@]}; do
     echo "Building binary for $OS/$ARCH..."
-    GOARCH=$ARCH GOOS=$OS CGO_ENABLED=0 $GO_BUILD_CMD "$GO_BUILD_OPT" -o "dist/traefik_$OS-$ARCH" ./cmd/traefik/
+    GOARCH=${ARCH} GOOS=${OS} CGO_ENABLED=0 ${GO_BUILD_CMD} "$GO_BUILD_OPT" -o "dist/traefik_$OS-$ARCH" ./cmd/traefik/
   done
 done
@@ -38,18 +40,21 @@ OS_ARCH_ARG=(386 amd64)
 for OS in ${OS_PLATFORM_ARG[@]}; do
   for ARCH in ${OS_ARCH_ARG[@]}; do
     # Get rid of existing binaries
-    rm -f dist/traefik_$OS-$ARCH
+    rm -f dist/traefik_${OS}-${ARCH}
     echo "Building binary for $OS/$ARCH..."
-    GOARCH=$ARCH GOOS=$OS CGO_ENABLED=0 $GO_BUILD_CMD "$GO_BUILD_OPT" -o "dist/traefik_$OS-$ARCH" ./cmd/traefik/
+    GOARCH=${ARCH} GOOS=${OS} CGO_ENABLED=0 ${GO_BUILD_CMD} "$GO_BUILD_OPT" -o "dist/traefik_$OS-$ARCH" ./cmd/traefik/
   done
 done
 
 # Build arm binaries
 OS_PLATFORM_ARG=(linux)
 OS_ARCH_ARG=(arm)
+ARM_ARG=(6)
 for OS in ${OS_PLATFORM_ARG[@]}; do
   for ARCH in ${OS_ARCH_ARG[@]}; do
-    echo "Building binary for $OS/$ARCH..."
-    GOARCH=$ARCH GOOS=$OS CGO_ENABLED=0 $GO_BUILD_CMD "$GO_BUILD_OPT" -o "dist/traefik_$OS-$ARCH" ./cmd/traefik/
+    for ARM in ${ARM_ARG[@]}; do
+      echo "Building binary for $OS/${ARCH}32v${ARM}..."
+      GOARCH=${ARCH} GOOS=${OS} GOARM=${ARM} CGO_ENABLED=0 ${GO_BUILD_CMD} "$GO_BUILD_OPT" -o "dist/traefik_$OS-${ARCH}" ./cmd/traefik/
+    done
   done
 done

vendor/github.com/NYTimes/gziphandler/gzip.go (generated, vendored, 97 changes)

@@ -81,6 +81,8 @@ type GzipResponseWriter struct {
 
     minSize int    // Specifed the minimum response size to gzip. If the response length is bigger than this value, it is compressed.
     buf     []byte // Holds the first part of the write before reaching the minSize or the end of the write.
+
+    contentTypes []string // Only compress if the response is one of these content-types. All are accepted if empty.
 }
 
 // Write appends data to the gzip writer.
@@ -101,8 +103,10 @@ func (w *GzipResponseWriter) Write(b []byte) (int, error) {
     // On the first write, w.buf changes from nil to a valid slice
     w.buf = append(w.buf, b...)
 
-    // If the global writes are bigger than the minSize, compression is enable.
-    if len(w.buf) >= w.minSize {
+    // If the global writes are bigger than the minSize and we're about to write
+    // a response containing a content type we want to handle, enable
+    // compression.
+    if len(w.buf) >= w.minSize && handleContentType(w.contentTypes, w) {
         err := w.startGzip()
         if err != nil {
             return 0, err
@@ -231,24 +235,29 @@ func NewGzipLevelHandler(level int) (func(http.Handler) http.Handler, error) {
 // NewGzipLevelAndMinSize behave as NewGzipLevelHandler except it let the caller
 // specify the minimum size before compression.
 func NewGzipLevelAndMinSize(level, minSize int) (func(http.Handler) http.Handler, error) {
-    return NewGzipHandler(level, minSize, &GzipResponseWriter{})
+    return GzipHandlerWithOpts(&GzipResponseWriter{}, CompressionLevel(level), MinSize(minSize))
 }
 
-// NewGzipHandler behave as NewGzipLevelHandler except it let the caller
-// specify the minimum size before compression and a GzipWriter.
-func NewGzipHandler(level, minSize int, gw GzipWriter) (func(http.Handler) http.Handler, error) {
-    if level != gzip.DefaultCompression && (level < gzip.BestSpeed || level > gzip.BestCompression) {
-        return nil, fmt.Errorf("invalid compression level requested: %d", level)
-    }
-    if minSize < 0 {
-        return nil, errors.New("minimum size must be more than zero")
-    }
+func GzipHandlerWithOpts(gw GzipWriter, opts ...option) (func(http.Handler) http.Handler, error) {
+    if gw == nil {
+        return nil, errors.New("the GzipWriter must be defined")
+    }
+
+    c := &config{
+        level:   gzip.DefaultCompression,
+        minSize: DefaultMinSize,
+    }
+
+    for _, o := range opts {
+        o(c)
+    }
+
+    if err := c.validate(); err != nil {
+        return nil, err
+    }
 
     return func(h http.Handler) http.Handler {
-        index := poolIndex(level)
+        index := poolIndex(c.level)
 
         return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
             w.Header().Add(vary, acceptEncoding)
@@ -256,7 +265,8 @@ func NewGzipHandler(level, minSize int, gw GzipWriter) (func(http.Handler) http.
             if acceptsGzip(r) {
                 gw.SetResponseWriter(w)
                 gw.setIndex(index)
-                gw.setMinSize(minSize)
+                gw.setMinSize(c.minSize)
+                gw.setContentTypes(c.contentTypes)
                 defer gw.Close()
 
                 h.ServeHTTP(gw, r)
@@ -267,6 +277,48 @@ func NewGzipHandler(level, minSize int, gw GzipWriter) (func(http.Handler) http.
     }, nil
 }
 
+// Used for functional configuration.
+type config struct {
+    minSize      int
+    level        int
+    contentTypes []string
+}
+
+func (c *config) validate() error {
+    if c.level != gzip.DefaultCompression && (c.level < gzip.BestSpeed || c.level > gzip.BestCompression) {
+        return fmt.Errorf("invalid compression level requested: %d", c.level)
+    }
+
+    if c.minSize < 0 {
+        return fmt.Errorf("minimum size must be more than zero")
+    }
+
+    return nil
+}
+
+type option func(c *config)
+
+func MinSize(size int) option {
+    return func(c *config) {
+        c.minSize = size
+    }
+}
+
+func CompressionLevel(level int) option {
+    return func(c *config) {
+        c.level = level
+    }
+}
+
+func ContentTypes(types []string) option {
+    return func(c *config) {
+        c.contentTypes = []string{}
+        for _, v := range types {
+            c.contentTypes = append(c.contentTypes, strings.ToLower(v))
+        }
+    }
+}
+
 // GzipHandler wraps an HTTP handler, to transparently gzip the response body if
 // the client supports it (via the Accept-Encoding header). This will compress at
 // the default compression level.
@@ -282,6 +334,23 @@ func acceptsGzip(r *http.Request) bool {
     return acceptedEncodings["gzip"] > 0.0
 }
 
+// returns true if we've been configured to compress the specific content type.
+func handleContentType(contentTypes []string, w http.ResponseWriter) bool {
+    // If contentTypes is empty we handle all content types.
+    if len(contentTypes) == 0 {
+        return true
+    }
+
+    ct := strings.ToLower(w.Header().Get(contentType))
+    for _, c := range contentTypes {
+        if c == ct {
+            return true
+        }
+    }
+
+    return false
+}
+
 // parseEncodings attempts to parse a list of codings, per RFC 2616, as might
 // appear in an Accept-Encoding header. It returns a map of content-codings to
 // quality values, and an error containing the errors encountered. It's probably
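
The options introduced above can be combined freely, and validate() runs after all of them have been applied. A minimal, illustrative composition using only identifiers visible in this diff (GzipHandlerWithOpts, GzipResponseWriter, CompressionLevel, MinSize, ContentTypes); the chosen values are arbitrary:

package main

import (
    "compress/gzip"
    "log"

    "github.com/NYTimes/gziphandler" // containous fork, vendored under this import path
)

func main() {
    wrapper, err := gziphandler.GzipHandlerWithOpts(
        &gziphandler.GzipResponseWriter{},
        gziphandler.CompressionLevel(gzip.BestSpeed),
        gziphandler.MinSize(1024),
        // handleContentType (above) does an exact, lower-cased comparison of the
        // whole Content-Type header value, so "text/html; charset=utf-8" would
        // not match the first entry here.
        gziphandler.ContentTypes([]string{"text/html", "application/json"}))
    if err != nil {
        // validate() rejects levels outside BestSpeed..BestCompression (other
        // than DefaultCompression) and negative minimum sizes.
        log.Fatal(err)
    }
    _ = wrapper // func(http.Handler) http.Handler, same shape as in the compress middleware
}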

vendor/github.com/NYTimes/gziphandler/wrapper.go (generated, vendored, 8 changes)

@@ -23,6 +23,7 @@ type GzipWriter interface {
     SetResponseWriter(http.ResponseWriter)
     setIndex(int)
     setMinSize(int)
+    setContentTypes([]string)
 }
 
 func (w *GzipResponseWriter) SetResponseWriter(rw http.ResponseWriter) {
@@ -37,6 +38,10 @@ func (w *GzipResponseWriter) setMinSize(minSize int) {
     w.minSize = minSize
 }
 
+func (w *GzipResponseWriter) setContentTypes(contentTypes []string) {
+    w.contentTypes = contentTypes
+}
+
 // --------
 
 type GzipResponseWriterWrapper struct {
@@ -45,6 +50,9 @@ type GzipResponseWriterWrapper struct {
 
 func (g *GzipResponseWriterWrapper) Write(b []byte) (int, error) {
     if g.gw == nil && isEncoded(g.Header()) {
+        if g.code != 0 {
+            g.ResponseWriter.WriteHeader(g.code)
+        }
         return g.ResponseWriter.Write(b)
     }
     return g.GzipResponseWriter.Write(b)
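
The Write change above replays a previously buffered status code (g.code) before passing an already-encoded body straight through to the underlying ResponseWriter. The sketch below shows that buffering pattern in isolation with a hypothetical type; the names are made up and this is not the fork's implementation:

package main

import (
    "fmt"
    "net/http"
    "net/http/httptest"
)

// statusBuffer mirrors the pattern: WriteHeader only records the status code,
// and the first Write replays it on the underlying writer before forwarding
// the body unchanged. Hypothetical type, for illustration only.
type statusBuffer struct {
    http.ResponseWriter
    code int
}

func (w *statusBuffer) WriteHeader(code int) {
    w.code = code // deferred until the first Write
}

func (w *statusBuffer) Write(b []byte) (int, error) {
    if w.code != 0 {
        w.ResponseWriter.WriteHeader(w.code) // replay the buffered status, as in the diff
        w.code = 0
    }
    return w.ResponseWriter.Write(b)
}

func main() {
    backend := http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
        rw.WriteHeader(http.StatusCreated)
        rw.Write([]byte("created"))
    })

    ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
        backend.ServeHTTP(&statusBuffer{ResponseWriter: rw}, r)
    }))
    defer ts.Close()

    resp, err := http.Get(ts.URL)
    if err != nil {
        panic(err)
    }
    fmt.Println(resp.StatusCode) // 201
}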

vendor/github.com/vulcand/oxy/forward/fwd.go (generated, vendored, 7 changes)

@@ -128,6 +128,7 @@ func New(setters ...optSetter) (*Forwarder, error) {
     if f.httpForwarder.roundTripper == nil {
         f.httpForwarder.roundTripper = http.DefaultTransport
     }
+    f.websocketForwarder.TLSClientConfig = f.httpForwarder.roundTripper.(*http.Transport).TLSClientConfig
     if f.httpForwarder.rewriter == nil {
         h, err := os.Hostname()
         if err != nil {
@@ -317,7 +318,11 @@ func (f *websocketForwarder) copyRequest(req *http.Request, u *url.URL) (outReq
     }
 
     if requestURI, err := url.ParseRequestURI(outReq.RequestURI); err == nil {
-        outReq.URL.Path = requestURI.Path
+        if requestURI.RawPath != "" {
+            outReq.URL.Path = requestURI.RawPath
+        } else {
+            outReq.URL.Path = requestURI.Path
+        }
         outReq.URL.RawQuery = requestURI.RawQuery
     }
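
The copyRequest change above prefers RawPath over Path when RawPath is set, so percent-encoded path segments survive WebSocket forwarding instead of being forwarded in decoded form. A small standard-library illustration (the request URI is hypothetical):

package main

import (
    "fmt"
    "net/url"
)

func main() {
    // Hypothetical request URI with an escaped slash inside a path segment.
    u, err := url.ParseRequestURI("/ws/rooms/foo%2Fbar?token=abc")
    if err != nil {
        panic(err)
    }

    fmt.Println(u.Path)     // "/ws/rooms/foo/bar"    decoded form, the escaping is lost
    fmt.Println(u.RawPath)  // "/ws/rooms/foo%2Fbar"  original encoding, set because it differs from Path
    fmt.Println(u.RawQuery) // "token=abc"
}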