diff --git a/app/router.go b/app/router.go
index e9efa0156..57316f8a6 100644
--- a/app/router.go
+++ b/app/router.go
@@ -11,7 +11,7 @@ import (
"sync"
"time"
- "github.com/PuerkitoBio/ghost/handlers"
+ "github.com/NYTimes/gziphandler"
log "github.com/Sirupsen/logrus"
"github.com/gorilla/mux"
"github.com/ugorji/go/codec"
@@ -86,32 +86,29 @@ func matchURL(r *http.Request, pattern string) (map[string]string, bool) {
return vars, true
}
-func gzipHandler(h http.HandlerFunc) http.HandlerFunc {
- return handlers.GZIPHandlerFunc(h, nil)
+func gzipHandler(h http.HandlerFunc) http.Handler {
+ return gziphandler.GzipHandler(h)
}
// RegisterTopologyRoutes registers the various topology routes with a http mux.
func RegisterTopologyRoutes(router *mux.Router, r Reporter, capabilities map[string]bool) {
get := router.Methods("GET").Subrouter()
- get.HandleFunc("/api",
+ get.Handle("/api",
gzipHandler(requestContextDecorator(apiHandler(r, capabilities))))
- get.HandleFunc("/api/topology",
+ get.Handle("/api/topology",
gzipHandler(requestContextDecorator(topologyRegistry.makeTopologyList(r))))
- get.
- HandleFunc("/api/topology/{topology}",
- gzipHandler(requestContextDecorator(topologyRegistry.captureRenderer(r, handleTopology)))).
+ get.Handle("/api/topology/{topology}",
+ gzipHandler(requestContextDecorator(topologyRegistry.captureRenderer(r, handleTopology)))).
Name("api_topology_topology")
- get.
- HandleFunc("/api/topology/{topology}/ws",
- requestContextDecorator(captureReporter(r, handleWebsocket))). // NB not gzip!
+ get.Handle("/api/topology/{topology}/ws",
+ requestContextDecorator(captureReporter(r, handleWebsocket))). // NB not gzip!
Name("api_topology_topology_ws")
- get.
- MatcherFunc(URLMatcher("/api/topology/{topology}/{id}")).HandlerFunc(
+ get.MatcherFunc(URLMatcher("/api/topology/{topology}/{id}")).Handler(
gzipHandler(requestContextDecorator(topologyRegistry.captureRenderer(r, handleNode)))).
Name("api_topology_topology_id")
- get.HandleFunc("/api/report",
+ get.Handle("/api/report",
gzipHandler(requestContextDecorator(makeRawReportHandler(r))))
- get.HandleFunc("/api/probes",
+ get.Handle("/api/probes",
gzipHandler(requestContextDecorator(makeProbeHandler(r))))
}
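
Because `gziphandler.GzipHandler` returns an `http.Handler` rather than an `http.HandlerFunc`, the routes above switch from `HandleFunc` to `Handle`. A minimal, standalone sketch of the same wiring (the `hello` handler, route path, and port are placeholders, not taken from Scope):

```go
package main

import (
	"fmt"
	"log"
	"net/http"

	"github.com/NYTimes/gziphandler"
	"github.com/gorilla/mux"
)

// hello stands in for the Scope API handlers wrapped in router.go.
func hello(w http.ResponseWriter, r *http.Request) {
	fmt.Fprintln(w, "hello")
}

func main() {
	router := mux.NewRouter()
	get := router.Methods("GET").Subrouter()

	// GzipHandler returns an http.Handler, so the route is registered
	// with Handle rather than HandleFunc.
	get.Handle("/api/hello", gziphandler.GzipHandler(http.HandlerFunc(hello)))

	log.Fatal(http.ListenAndServe(":8080", router))
}
```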
diff --git a/extras/copyreport/main.go b/extras/copyreport/main.go
index 07e47d067..60e8eeb87 100644
--- a/extras/copyreport/main.go
+++ b/extras/copyreport/main.go
@@ -2,7 +2,6 @@
package main
import (
- "compress/gzip"
"flag"
"log"
@@ -20,7 +19,7 @@ func main() {
if err != nil {
log.Fatal(err)
}
- if err = rpt.WriteToFile(flag.Arg(1), gzip.DefaultCompression); err != nil {
+ if err = rpt.WriteToFile(flag.Arg(1)); err != nil {
log.Fatal(err)
}
}
diff --git a/report/marshal.go b/report/marshal.go
index 1dc7fe52d..eaa790251 100644
--- a/report/marshal.go
+++ b/report/marshal.go
@@ -39,11 +39,10 @@ func (StdoutPublisher) Publish(rep Report) error {
// WriteBinary writes a Report as a gzipped msgpack into a bytes.Buffer
func (rep Report) WriteBinary() (*bytes.Buffer, error) {
w := &bytes.Buffer{}
- gzwriter, err := gzip.NewWriterLevel(w, gzip.DefaultCompression)
- if err != nil {
- return nil, err
- }
- if err = codec.NewEncoder(gzwriter, &codec.MsgpackHandle{}).Encode(&rep); err != nil {
+ gzwriter := gzipWriterPool.Get().(*gzip.Writer)
+ gzwriter.Reset(w)
+ defer gzipWriterPool.Put(gzwriter)
+ if err := codec.NewEncoder(gzwriter, &codec.MsgpackHandle{}).Encode(&rep); err != nil {
return nil, err
}
gzwriter.Close() // otherwise the content won't get flushed to the output stream
@@ -61,10 +60,14 @@ func (c byteCounter) Read(p []byte) (n int, err error) {
return n, err
}
-// buffer pool to reduce garbage-collection
+// buffer pools to reduce garbage-collection
var bufferPool = &sync.Pool{
New: func() interface{} { return new(bytes.Buffer) },
}
+var gzipWriterPool = &sync.Pool{
+ // NewWriterLevel() only errors if the compression level is invalid, which can't happen here
+ New: func() interface{} { w, _ := gzip.NewWriterLevel(nil, gzip.DefaultCompression); return w },
+}
// ReadBinary reads bytes into a Report.
//
@@ -169,7 +172,7 @@ func MakeFromFile(path string) (rpt Report, _ error) {
// WriteToFile writes a Report to a file. The encoding is determined
// by the file extension (".msgpack" or ".json", with an optional
// ".gz").
-func (rep *Report) WriteToFile(path string, compressionLevel int) error {
+func (rep *Report) WriteToFile(path string) error {
f, err := os.Create(path)
if err != nil {
return err
@@ -186,10 +189,9 @@ func (rep *Report) WriteToFile(path string, compressionLevel int) error {
defer bufwriter.Flush()
w = bufwriter
if gzipped {
- gzwriter, err := gzip.NewWriterLevel(w, compressionLevel)
- if err != nil {
- return err
- }
+ gzwriter := gzipWriterPool.Get().(*gzip.Writer)
+ gzwriter.Reset(w)
+ defer gzipWriterPool.Put(gzwriter)
defer gzwriter.Close()
w = gzwriter
}
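
The marshal.go changes replace a per-call `gzip.NewWriterLevel` with a shared `sync.Pool` of writers, trimming allocations and GC pressure: `Get` a pooled writer, `Reset` it onto the new destination, and `Put` it back when done. A self-contained sketch of that reuse pattern (the `compress` helper is illustrative, not part of the report package):

```go
package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"sync"
)

// Pool of gzip writers at the default compression level. NewWriterLevel
// only fails on an invalid level, so the error can safely be ignored here.
var gzipWriterPool = &sync.Pool{
	New: func() interface{} {
		w, _ := gzip.NewWriterLevel(nil, gzip.DefaultCompression)
		return w
	},
}

// compress gzips data with a pooled writer instead of allocating one per call.
func compress(data []byte) ([]byte, error) {
	buf := &bytes.Buffer{}
	gz := gzipWriterPool.Get().(*gzip.Writer)
	gz.Reset(buf) // rebind the reused writer to the new destination
	defer gzipWriterPool.Put(gz)

	if _, err := gz.Write(data); err != nil {
		return nil, err
	}
	// Close flushes the remaining data and the gzip footer into buf; the
	// writer stays reusable because Reset runs before the next use.
	if err := gz.Close(); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}

func main() {
	out, err := compress([]byte("some report payload"))
	fmt.Println(len(out), err)
}
```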
diff --git a/vendor/github.com/NYTimes/gziphandler/LICENSE b/vendor/github.com/NYTimes/gziphandler/LICENSE
new file mode 100644
index 000000000..df6192d36
--- /dev/null
+++ b/vendor/github.com/NYTimes/gziphandler/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2016-2017 The New York Times Company
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/NYTimes/gziphandler/gzip.go b/vendor/github.com/NYTimes/gziphandler/gzip.go
new file mode 100644
index 000000000..028b553f7
--- /dev/null
+++ b/vendor/github.com/NYTimes/gziphandler/gzip.go
@@ -0,0 +1,485 @@
+package gziphandler
+
+import (
+ "bufio"
+ "compress/gzip"
+ "fmt"
+ "io"
+ "mime"
+ "net"
+ "net/http"
+ "strconv"
+ "strings"
+ "sync"
+)
+
+const (
+ vary = "Vary"
+ acceptEncoding = "Accept-Encoding"
+ contentEncoding = "Content-Encoding"
+ contentType = "Content-Type"
+ contentLength = "Content-Length"
+)
+
+type codings map[string]float64
+
+const (
+ // DefaultQValue is the default qvalue to assign to an encoding if no explicit qvalue is set.
+ // This is actually kind of ambiguous in RFC 2616, so hopefully it's correct.
+ // The examples seem to indicate that it is.
+ DefaultQValue = 1.0
+
+ // 1500 bytes is the MTU size for the internet since that is the largest size allowed at the network layer.
+ // If you take a file that is 1300 bytes and compress it to 800 bytes, it’s still transmitted in that same 1500 byte packet regardless, so you’ve gained nothing.
+ // That being the case, you should restrict the gzip compression to files with a size greater than a single packet, 1400 bytes (1.4KB) is a safe value.
+ DefaultMinSize = 1400
+)
+
+// gzipWriterPools stores a sync.Pool for each compression level for reuse of
+// gzip.Writers. Use poolIndex to convert a compression level to an index into
+// gzipWriterPools.
+var gzipWriterPools [gzip.BestCompression - gzip.BestSpeed + 2]*sync.Pool
+
+func init() {
+ for i := gzip.BestSpeed; i <= gzip.BestCompression; i++ {
+ addLevelPool(i)
+ }
+ addLevelPool(gzip.DefaultCompression)
+}
+
+// poolIndex maps a compression level to its index into gzipWriterPools. It
+// assumes that level is a valid gzip compression level.
+func poolIndex(level int) int {
+ // gzip.DefaultCompression == -1, so we need to treat it special.
+ if level == gzip.DefaultCompression {
+ return gzip.BestCompression - gzip.BestSpeed + 1
+ }
+ return level - gzip.BestSpeed
+}
+
+func addLevelPool(level int) {
+ gzipWriterPools[poolIndex(level)] = &sync.Pool{
+ New: func() interface{} {
+ // NewWriterLevel only returns error on a bad level, we are guaranteeing
+ // that this will be a valid level so it is okay to ignore the returned
+ // error.
+ w, _ := gzip.NewWriterLevel(nil, level)
+ return w
+ },
+ }
+}
+
+// GzipResponseWriter provides an http.ResponseWriter interface, which gzips
+// bytes before writing them to the underlying response. This doesn't close the
+// writers, so don't forget to do that.
+// It can be configured to skip responses smaller than minSize.
+type GzipResponseWriter struct {
+ http.ResponseWriter
+ index int // Index for gzipWriterPools.
+ gw *gzip.Writer
+
+ code int // Saves the WriteHeader value.
+
+ minSize int // Specifies the minimum response size to gzip. If the response length is bigger than this value, it is compressed.
+ buf []byte // Holds the first part of the write before reaching the minSize or the end of the write.
+
+ contentTypes []parsedContentType // Only compress if the response is one of these content-types. All are accepted if empty.
+}
+
+type GzipResponseWriterWithCloseNotify struct {
+ *GzipResponseWriter
+}
+
+func (w GzipResponseWriterWithCloseNotify) CloseNotify() <-chan bool {
+ return w.ResponseWriter.(http.CloseNotifier).CloseNotify()
+}
+
+// Write appends data to the gzip writer.
+func (w *GzipResponseWriter) Write(b []byte) (int, error) {
+ // If content type is not set.
+ if _, ok := w.Header()[contentType]; !ok {
+ // Infer it from the uncompressed body.
+ w.Header().Set(contentType, http.DetectContentType(b))
+ }
+
+ // GZIP responseWriter is initialized. Use the GZIP responseWriter.
+ if w.gw != nil {
+ n, err := w.gw.Write(b)
+ return n, err
+ }
+
+ // Save the write into a buffer for later use in GZIP responseWriter (if content is long enough) or at close with regular responseWriter.
+ // On the first write, w.buf changes from nil to a valid slice
+ w.buf = append(w.buf, b...)
+
+ // If the global writes are bigger than the minSize and we're about to write
+ // a response containing a content type we want to handle, enable
+ // compression.
+ if len(w.buf) >= w.minSize && handleContentType(w.contentTypes, w) && w.Header().Get(contentEncoding) == "" {
+ err := w.startGzip()
+ if err != nil {
+ return 0, err
+ }
+ }
+
+ return len(b), nil
+}
+
+// startGzip initializes any GZIP-specific information.
+func (w *GzipResponseWriter) startGzip() error {
+
+ // Set the GZIP header.
+ w.Header().Set(contentEncoding, "gzip")
+
+ // if the Content-Length is already set, then calls to Write on gzip
+ // will fail to set the Content-Length header since its already set
+ // See: https://github.com/golang/go/issues/14975.
+ w.Header().Del(contentLength)
+
+ // Write the header to gzip response.
+ if w.code != 0 {
+ w.ResponseWriter.WriteHeader(w.code)
+ }
+
+ // Initialize the GZIP response.
+ w.init()
+
+ // Flush the buffer into the gzip response.
+ n, err := w.gw.Write(w.buf)
+
+ // This should never happen (per io.Writer docs), but if the write didn't
+ // accept the entire buffer but returned no specific error, we have no clue
+ // what's going on, so abort just to be safe.
+ if err == nil && n < len(w.buf) {
+ return io.ErrShortWrite
+ }
+
+ w.buf = nil
+ return err
+}
+
+// WriteHeader just saves the response code until close or GZIP effective writes.
+func (w *GzipResponseWriter) WriteHeader(code int) {
+ if w.code == 0 {
+ w.code = code
+ }
+}
+
+// init grabs a new gzip writer from the gzipWriterPool and writes the correct
+// content encoding header.
+func (w *GzipResponseWriter) init() {
+ // Bytes written during ServeHTTP are redirected to this gzip writer
+ // before being written to the underlying response.
+ gzw := gzipWriterPools[w.index].Get().(*gzip.Writer)
+ gzw.Reset(w.ResponseWriter)
+ w.gw = gzw
+}
+
+// Close will close the gzip.Writer and will put it back in the gzipWriterPool.
+func (w *GzipResponseWriter) Close() error {
+ if w.gw == nil {
+ // Gzip not triggered yet, write out regular response.
+ if w.code != 0 {
+ w.ResponseWriter.WriteHeader(w.code)
+ }
+ if w.buf != nil {
+ _, writeErr := w.ResponseWriter.Write(w.buf)
+ // Returns the error if any at write.
+ if writeErr != nil {
+ return fmt.Errorf("gziphandler: write to regular responseWriter at close gets error: %q", writeErr.Error())
+ }
+ }
+ return nil
+ }
+
+ err := w.gw.Close()
+ gzipWriterPools[w.index].Put(w.gw)
+ w.gw = nil
+ return err
+}
+
+// Flush flushes the underlying *gzip.Writer and then the underlying
+// http.ResponseWriter if it is an http.Flusher. This makes GzipResponseWriter
+// an http.Flusher.
+func (w *GzipResponseWriter) Flush() {
+ if w.gw == nil {
+ // Only flush once startGzip has been called.
+ //
+ // Flush is thus a no-op until the written body
+ // exceeds minSize.
+ return
+ }
+
+ w.gw.Flush()
+
+ if fw, ok := w.ResponseWriter.(http.Flusher); ok {
+ fw.Flush()
+ }
+}
+
+// Hijack implements http.Hijacker. If the underlying ResponseWriter is a
+// Hijacker, its Hijack method is returned. Otherwise an error is returned.
+func (w *GzipResponseWriter) Hijack() (net.Conn, *bufio.ReadWriter, error) {
+ if hj, ok := w.ResponseWriter.(http.Hijacker); ok {
+ return hj.Hijack()
+ }
+ return nil, nil, fmt.Errorf("http.Hijacker interface is not supported")
+}
+
+// verify Hijacker interface implementation
+var _ http.Hijacker = &GzipResponseWriter{}
+
+// MustNewGzipLevelHandler behaves just like NewGzipLevelHandler except that in
+// an error case it panics rather than returning an error.
+func MustNewGzipLevelHandler(level int) func(http.Handler) http.Handler {
+ wrap, err := NewGzipLevelHandler(level)
+ if err != nil {
+ panic(err)
+ }
+ return wrap
+}
+
+// NewGzipLevelHandler returns a wrapper function (often known as middleware)
+// which can be used to wrap an HTTP handler to transparently gzip the response
+// body if the client supports it (via the Accept-Encoding header). Responses will
+// be encoded at the given gzip compression level. An error will be returned only
+// if an invalid gzip compression level is given, so if one can ensure the level
+// is valid, the returned error can be safely ignored.
+func NewGzipLevelHandler(level int) (func(http.Handler) http.Handler, error) {
+ return NewGzipLevelAndMinSize(level, DefaultMinSize)
+}
+
+// NewGzipLevelAndMinSize behaves as NewGzipLevelHandler except it lets the caller
+// specify the minimum size before compression.
+func NewGzipLevelAndMinSize(level, minSize int) (func(http.Handler) http.Handler, error) {
+ return GzipHandlerWithOpts(CompressionLevel(level), MinSize(minSize))
+}
+
+func GzipHandlerWithOpts(opts ...option) (func(http.Handler) http.Handler, error) {
+ c := &config{
+ level: gzip.DefaultCompression,
+ minSize: DefaultMinSize,
+ }
+
+ for _, o := range opts {
+ o(c)
+ }
+
+ if err := c.validate(); err != nil {
+ return nil, err
+ }
+
+ return func(h http.Handler) http.Handler {
+ index := poolIndex(c.level)
+
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.Header().Add(vary, acceptEncoding)
+ if acceptsGzip(r) {
+ gw := &GzipResponseWriter{
+ ResponseWriter: w,
+ index: index,
+ minSize: c.minSize,
+ contentTypes: c.contentTypes,
+ }
+ defer gw.Close()
+
+ if _, ok := w.(http.CloseNotifier); ok {
+ gwcn := GzipResponseWriterWithCloseNotify{gw}
+ h.ServeHTTP(gwcn, r)
+ } else {
+ h.ServeHTTP(gw, r)
+ }
+
+ } else {
+ h.ServeHTTP(w, r)
+ }
+ })
+ }, nil
+}
+
+// Parsed representation of one of the inputs to ContentTypes.
+// See https://golang.org/pkg/mime/#ParseMediaType
+type parsedContentType struct {
+ mediaType string
+ params map[string]string
+}
+
+// equals returns whether this content type matches another content type.
+func (pct parsedContentType) equals(mediaType string, params map[string]string) bool {
+ if pct.mediaType != mediaType {
+ return false
+ }
+ // if pct has no params, don't care about other's params
+ if len(pct.params) == 0 {
+ return true
+ }
+
+ // if pct has any params, they must be identical to other's.
+ if len(pct.params) != len(params) {
+ return false
+ }
+ for k, v := range pct.params {
+ if w, ok := params[k]; !ok || v != w {
+ return false
+ }
+ }
+ return true
+}
+
+// Used for functional configuration.
+type config struct {
+ minSize int
+ level int
+ contentTypes []parsedContentType
+}
+
+func (c *config) validate() error {
+ if c.level != gzip.DefaultCompression && (c.level < gzip.BestSpeed || c.level > gzip.BestCompression) {
+ return fmt.Errorf("invalid compression level requested: %d", c.level)
+ }
+
+ if c.minSize < 0 {
+ return fmt.Errorf("minimum size must be more than zero")
+ }
+
+ return nil
+}
+
+type option func(c *config)
+
+func MinSize(size int) option {
+ return func(c *config) {
+ c.minSize = size
+ }
+}
+
+func CompressionLevel(level int) option {
+ return func(c *config) {
+ c.level = level
+ }
+}
+
+// ContentTypes specifies a list of content types to compare
+// the Content-Type header to before compressing. If none
+// match, the response will be returned as-is.
+//
+// Content types are compared in a case-insensitive, whitespace-ignored
+// manner.
+//
+// A MIME type without any other directive will match a content type
+// that has the same MIME type, regardless of that content type's other
+// directives. I.e., "text/html" will match both "text/html" and
+// "text/html; charset=utf-8".
+//
+// A MIME type with any other directive will only match a content type
+// that has the same MIME type and other directives. I.e.,
+// "text/html; charset=utf-8" will only match "text/html; charset=utf-8".
+//
+// By default, responses are gzipped regardless of
+// Content-Type.
+func ContentTypes(types []string) option {
+ return func(c *config) {
+ c.contentTypes = []parsedContentType{}
+ for _, v := range types {
+ mediaType, params, err := mime.ParseMediaType(v)
+ if err == nil {
+ c.contentTypes = append(c.contentTypes, parsedContentType{mediaType, params})
+ }
+ }
+ }
+}
+
+// GzipHandler wraps an HTTP handler, to transparently gzip the response body if
+// the client supports it (via the Accept-Encoding header). This will compress at
+// the default compression level.
+func GzipHandler(h http.Handler) http.Handler {
+ wrapper, _ := NewGzipLevelHandler(gzip.DefaultCompression)
+ return wrapper(h)
+}
+
+// acceptsGzip returns true if the given HTTP request indicates that it will
+// accept a gzipped response.
+func acceptsGzip(r *http.Request) bool {
+ acceptedEncodings, _ := parseEncodings(r.Header.Get(acceptEncoding))
+ return acceptedEncodings["gzip"] > 0.0
+}
+
+// returns true if we've been configured to compress the specific content type.
+func handleContentType(contentTypes []parsedContentType, w http.ResponseWriter) bool {
+ // If contentTypes is empty we handle all content types.
+ if len(contentTypes) == 0 {
+ return true
+ }
+
+ ct := w.Header().Get(contentType)
+ mediaType, params, err := mime.ParseMediaType(ct)
+ if err != nil {
+ return false
+ }
+
+ for _, c := range contentTypes {
+ if c.equals(mediaType, params) {
+ return true
+ }
+ }
+
+ return false
+}
+
+// parseEncodings attempts to parse a list of codings, per RFC 2616, as might
+// appear in an Accept-Encoding header. It returns a map of content-codings to
+// quality values, and an error containing the errors encountered. It's probably
+// safe to ignore those, because silently ignoring errors is how the internet
+// works.
+//
+// See: http://tools.ietf.org/html/rfc2616#section-14.3.
+func parseEncodings(s string) (codings, error) {
+ c := make(codings)
+ var e []string
+
+ for _, ss := range strings.Split(s, ",") {
+ coding, qvalue, err := parseCoding(ss)
+
+ if err != nil {
+ e = append(e, err.Error())
+ } else {
+ c[coding] = qvalue
+ }
+ }
+
+ // TODO (adammck): Use a proper multi-error struct, so the individual errors
+ // can be extracted if anyone cares.
+ if len(e) > 0 {
+ return c, fmt.Errorf("errors while parsing encodings: %s", strings.Join(e, ", "))
+ }
+
+ return c, nil
+}
+
+// parseCoding parses a single coding (content-coding with an optional qvalue),
+// as might appear in an Accept-Encoding header. It attempts to forgive minor
+// formatting errors.
+func parseCoding(s string) (coding string, qvalue float64, err error) {
+ for n, part := range strings.Split(s, ";") {
+ part = strings.TrimSpace(part)
+ qvalue = DefaultQValue
+
+ if n == 0 {
+ coding = strings.ToLower(part)
+ } else if strings.HasPrefix(part, "q=") {
+ qvalue, err = strconv.ParseFloat(strings.TrimPrefix(part, "q="), 64)
+
+ if qvalue < 0.0 {
+ qvalue = 0.0
+ } else if qvalue > 1.0 {
+ qvalue = 1.0
+ }
+ }
+ }
+
+ if coding == "" {
+ err = fmt.Errorf("empty content-coding")
+ }
+
+ return
+}
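
Besides the plain `GzipHandler` wrapper used in router.go, the vendored package exposes a functional-options constructor. A hedged sketch using `GzipHandlerWithOpts` with the `MinSize` and `ContentTypes` options defined above (the route, payload, and port are illustrative only):

```go
package main

import (
	"log"
	"net/http"

	"github.com/NYTimes/gziphandler"
)

func main() {
	// Compress only JSON responses larger than 1KB. The constructor errors
	// only on an invalid compression level or a negative minimum size.
	wrap, err := gziphandler.GzipHandlerWithOpts(
		gziphandler.MinSize(1024),
		gziphandler.ContentTypes([]string{"application/json"}),
	)
	if err != nil {
		log.Fatal(err)
	}

	api := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		w.Write([]byte(`{"ok":true}`))
	})

	log.Fatal(http.ListenAndServe(":8080", wrap(api)))
}
```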
diff --git a/vendor/github.com/NYTimes/gziphandler/gzip_go18.go b/vendor/github.com/NYTimes/gziphandler/gzip_go18.go
new file mode 100644
index 000000000..fa9665b7e
--- /dev/null
+++ b/vendor/github.com/NYTimes/gziphandler/gzip_go18.go
@@ -0,0 +1,43 @@
+// +build go1.8
+
+package gziphandler
+
+import "net/http"
+
+// Push initiates an HTTP/2 server push.
+// Push returns ErrNotSupported if the client has disabled push or if push
+// is not supported on the underlying connection.
+func (w *GzipResponseWriter) Push(target string, opts *http.PushOptions) error {
+ pusher, ok := w.ResponseWriter.(http.Pusher)
+ if ok && pusher != nil {
+ return pusher.Push(target, setAcceptEncodingForPushOptions(opts))
+ }
+ return http.ErrNotSupported
+}
+
+// setAcceptEncodingForPushOptions sets "Accept-Encoding" : "gzip" for PushOptions without overriding existing headers.
+func setAcceptEncodingForPushOptions(opts *http.PushOptions) *http.PushOptions {
+
+ if opts == nil {
+ opts = &http.PushOptions{
+ Header: http.Header{
+ acceptEncoding: []string{"gzip"},
+ },
+ }
+ return opts
+ }
+
+ if opts.Header == nil {
+ opts.Header = http.Header{
+ acceptEncoding: []string{"gzip"},
+ }
+ return opts
+ }
+
+ if encoding := opts.Header.Get(acceptEncoding); encoding == "" {
+ opts.Header.Add(acceptEncoding, "gzip")
+ return opts
+ }
+
+ return opts
+}
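
gzip_go18.go adds HTTP/2 server push on Go 1.8+: `Push` forwards to the underlying `http.Pusher` and makes sure the pushed request advertises `Accept-Encoding: gzip`. A sketch of a handler behind the gzip wrapper attempting a push (the asset path and certificate files are placeholders; push only works over an HTTP/2 TLS connection):

```go
package main

import (
	"log"
	"net/http"

	"github.com/NYTimes/gziphandler"
)

func main() {
	index := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Over HTTP/2, w is the wrapper's GzipResponseWriter, whose Push
		// method forwards to the real pusher and injects Accept-Encoding.
		if pusher, ok := w.(http.Pusher); ok {
			// Best effort: connections without push support return
			// http.ErrNotSupported, which is simply ignored here.
			_ = pusher.Push("/static/app.js", nil)
		}
		w.Header().Set("Content-Type", "text/html")
		w.Write([]byte(`<script src="/static/app.js"></script>`))
	})

	handler := gziphandler.GzipHandler(index)
	log.Fatal(http.ListenAndServeTLS(":8443", "cert.pem", "key.pem", handler))
}
```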
diff --git a/vendor/github.com/PuerkitoBio/ghost/LICENSE b/vendor/github.com/PuerkitoBio/ghost/LICENSE
deleted file mode 100644
index d88451f3f..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/LICENSE
+++ /dev/null
@@ -1,12 +0,0 @@
-Copyright (c) 2013, Martin Angers
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
-
-* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
-
-* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
-
-* Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/PuerkitoBio/ghost/README.md b/vendor/github.com/PuerkitoBio/ghost/README.md
deleted file mode 100644
index 5843b3eea..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/README.md
+++ /dev/null
@@ -1,83 +0,0 @@
-# Ghost
-
-Ghost is a web development library loosely inspired by node's [Connect library][connect]. It provides a number of simple, single-responsibility HTTP handlers that can be combined to build a full-featured web server, and a generic template engine integration interface.
-
-It stays close to the metal, not abstracting Go's standard library away. As a matter of fact, any stdlib handler can be used with Ghost's handlers, they simply are `net/http.Handler`'s.
-
-## Installation and documentation
-
-`go get github.com/PuerkitoBio/ghost`
-
-[API reference][godoc]
-
-*Status* : Still under development, things will change.
-
-## Example
-
-See the /ghostest directory for a complete working example of a website built with Ghost. It shows all handlers and template support of Ghost.
-
-## Handlers
-
-Ghost offers the following handlers:
-
-* BasicAuthHandler : basic authentication support.
-* ContextHandler : key-value map provider for the duration of the request.
-* FaviconHandler : simple and efficient favicon renderer.
-* GZIPHandler : gzip-compresser for the body of the response.
-* LogHandler : fully customizable request logger.
-* PanicHandler : panic-catching handler to control the error response.
-* SessionHandler : store-agnostic server-side session provider.
-* StaticHandler : convenience handler that wraps a call to `net/http.ServeFile`.
-
-Two stores are provided for the session persistence, `MemoryStore`, an in-memory map that is not suited for production environment, and `RedisStore`, a more robust and scalable [redigo][]-based Redis store. Because of the generic `SessionStore` interface, custom stores can easily be created as needed.
-
-The `handlers` package also offers the `ChainableHandler` interface, which supports combining HTTP handlers in a sequential fashion, and the `ChainHandlers()` function that creates a new handler from the sequential combination of any number of handlers.
-
-As a convenience, all functions that take a `http.Handler` as argument also have a corresponding function with the `Func` suffix that take a `http.HandlerFunc` instead as argument. This saves the type-cast when a simple handler function is passed (for example, `SessionHandler()` and `SessionHandlerFunc()`).
-
-### Handlers Design
-
-The HTTP handlers such as Basic Auth and Context need to store some state information to provide their functionality. Instead of using variables and a mutex to control shared access, Ghost augments the `http.ResponseWriter` interface that is part of the Handler's `ServeHTTP()` function signature. Because this instance is unique for each request and is not shared, there is no locking involved to access the state information.
-
-However, when combining such handlers, Ghost needs a way to move through the chain of augmented ResponseWriters. This is why these *augmented writers* need to implement the `WrapWriter` interface. A single method is required, `WrappedWriter() http.ResponseWriter`, which returns the wrapped ResponseWriter.
-
-And to get back a specific augmented writer, the `GetResponseWriter()` function is provided. It takes a ResponseWriter and a predicate function as argument, and returns the requested specific writer using the *comma-ok* pattern. Example, for the session writer:
-
-```Go
-func getSessionWriter(w http.ResponseWriter) (*sessResponseWriter, bool) {
- ss, ok := GetResponseWriter(w, func(tst http.ResponseWriter) bool {
- _, ok := tst.(*sessResponseWriter)
- return ok
- })
- if ok {
- return ss.(*sessResponseWriter), true
- }
- return nil, false
-}
-```
-
-Ghost does not provide a muxer, there are already many great ones available, but I would recommend Go's native `http.ServeMux` or [pat][] because it has great features and plays well with Ghost's design. Gorilla's muxer is very popular, but since it depends on Gorilla's (mutex-based) context provider, this is redundant with Ghost's context.
-
-## Templates
-
-Ghost supports the following template engines:
-
-* Go's native templates (needs work, at the moment does not work with nested templates)
-* [Amber][]
-
-TODO : Go's mustache implementation.
-
-### Templates Design
-
-The template engines can be registered much in the same way as database drivers, just by importing for side effects (using `_ "import/path"`). The `init()` function of the template engine's package registers the template compiler with the correct file extension, and the engine can be used.
-
-## License
-
-The [BSD 3-Clause license][lic].
-
-[connect]: https://github.com/senchalabs/connect
-[godoc]: http://godoc.org/github.com/PuerkitoBio/ghost
-[lic]: http://opensource.org/licenses/BSD-3-Clause
-[redigo]: https://github.com/garyburd/redigo
-[pat]: https://github.com/bmizerany/pat
-[amber]: https://github.com/eknkc/amber
diff --git a/vendor/github.com/PuerkitoBio/ghost/app.go b/vendor/github.com/PuerkitoBio/ghost/app.go
deleted file mode 100644
index 5635f4c26..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/app.go
+++ /dev/null
@@ -1,12 +0,0 @@
-package ghost
-
-import (
- "log"
-)
-
-// Logging function, defaults to Go's native log.Printf function. The idea to use
-// this instead of a *log.Logger struct is that it can be set to any of log.{Printf,Fatalf, Panicf},
-// but also to more flexible userland loggers like SeeLog (https://github.com/cihub/seelog).
-// It could be set, for example, to SeeLog's Debugf function. Any function with the
-// signature func(fmt string, params ...interface{}).
-var LogFn = log.Printf
diff --git a/vendor/github.com/PuerkitoBio/ghost/ghostest/index.html b/vendor/github.com/PuerkitoBio/ghost/ghostest/index.html
deleted file mode 100644
index ec844b0c9..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/ghostest/index.html
+++ /dev/null
@@ -1,22 +0,0 @@
-<!-- ghostest demo index.html: page markup and an inlined, minified jQuery bundle, omitted here -->
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/basicauth.go b/vendor/github.com/PuerkitoBio/ghost/handlers/basicauth.go
deleted file mode 100644
index d77f13981..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/basicauth.go
+++ /dev/null
@@ -1,123 +0,0 @@
-package handlers
-
-// Inspired by node.js' Connect library implementation of the basicAuth middleware.
-// https://github.com/senchalabs/connect
-
-import (
- "bytes"
- "encoding/base64"
- "fmt"
- "net/http"
- "strings"
-)
-
-// Internal writer that keeps track of the currently authenticated user.
-type userResponseWriter struct {
- http.ResponseWriter
- user interface{}
- userName string
-}
-
-// Implement the WrapWriter interface.
-func (this *userResponseWriter) WrappedWriter() http.ResponseWriter {
- return this.ResponseWriter
-}
-
-// Writes an unauthorized response to the client, specifying the expected authentication
-// information.
-func Unauthorized(w http.ResponseWriter, realm string) {
- w.Header().Set("Www-Authenticate", fmt.Sprintf(`Basic realm="%s"`, realm))
- w.WriteHeader(http.StatusUnauthorized)
- w.Write([]byte("Unauthorized"))
-}
-
-// Writes a bad request response to the client, with an optional message.
-func BadRequest(w http.ResponseWriter, msg string) {
- w.WriteHeader(http.StatusBadRequest)
- if msg == "" {
- msg = "Bad Request"
- }
- w.Write([]byte(msg))
-}
-
-// BasicAuthHandlerFunc is the same as BasicAuthHandler, it is just a convenience
-// signature that accepts a func(http.ResponseWriter, *http.Request) instead of
-// a http.Handler interface. It saves the boilerplate http.HandlerFunc() cast.
-func BasicAuthHandlerFunc(h http.HandlerFunc,
- authFn func(string, string) (interface{}, bool), realm string) http.HandlerFunc {
- return BasicAuthHandler(h, authFn, realm)
-}
-
-// Returns a Basic Authentication handler, protecting the wrapped handler from
-// being accessed if the authentication function is not successful.
-func BasicAuthHandler(h http.Handler,
- authFn func(string, string) (interface{}, bool), realm string) http.HandlerFunc {
-
- if realm == "" {
- realm = "Authorization Required"
- }
- return func(w http.ResponseWriter, r *http.Request) {
- // Self-awareness
- if _, ok := GetUser(w); ok {
- h.ServeHTTP(w, r)
- return
- }
- authInfo := r.Header.Get("Authorization")
- if authInfo == "" {
- // No authorization info, return 401
- Unauthorized(w, realm)
- return
- }
- parts := strings.Split(authInfo, " ")
- if len(parts) != 2 {
- BadRequest(w, "Bad authorization header")
- return
- }
- scheme := parts[0]
- creds, err := base64.StdEncoding.DecodeString(parts[1])
- if err != nil {
- BadRequest(w, "Bad credentials encoding")
- return
- }
- index := bytes.Index(creds, []byte(":"))
- if scheme != "Basic" || index < 0 {
- BadRequest(w, "Bad authorization header")
- return
- }
- user, pwd := string(creds[:index]), string(creds[index+1:])
- udata, ok := authFn(user, pwd)
- if ok {
- // Save user data and continue
- uw := &userResponseWriter{w, udata, user}
- h.ServeHTTP(uw, r)
- } else {
- Unauthorized(w, realm)
- }
- }
-}
-
-// Return the currently authenticated user. This is the same data that was returned
-// by the authentication function passed to BasicAuthHandler.
-func GetUser(w http.ResponseWriter) (interface{}, bool) {
- usr, ok := GetResponseWriter(w, func(tst http.ResponseWriter) bool {
- _, ok := tst.(*userResponseWriter)
- return ok
- })
- if ok {
- return usr.(*userResponseWriter).user, true
- }
- return nil, false
-}
-
-// Return the currently authenticated user name. This is the user name that was
-// authenticated for the current request.
-func GetUserName(w http.ResponseWriter) (string, bool) {
- usr, ok := GetResponseWriter(w, func(tst http.ResponseWriter) bool {
- _, ok := tst.(*userResponseWriter)
- return ok
- })
- if ok {
- return usr.(*userResponseWriter).userName, true
- }
- return "", false
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/basicauth_test.go b/vendor/github.com/PuerkitoBio/ghost/handlers/basicauth_test.go
deleted file mode 100644
index 3181e5477..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/basicauth_test.go
+++ /dev/null
@@ -1,62 +0,0 @@
-package handlers
-
-import (
- "fmt"
- "net/http"
- "net/http/httptest"
- "testing"
-)
-
-func TestUnauth(t *testing.T) {
- h := BasicAuthHandler(StaticFileHandler("./testdata/script.js"), func(u, pwd string) (interface{}, bool) {
- if u == "me" && pwd == "you" {
- return u, true
- }
- return nil, false
- }, "foo")
- s := httptest.NewServer(h)
- defer s.Close()
-
- res, err := http.Get(s.URL)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusUnauthorized, res.StatusCode, t)
- assertHeader("Www-Authenticate", `Basic realm="foo"`, res, t)
-}
-
-func TestGzippedAuth(t *testing.T) {
- h := GZIPHandler(BasicAuthHandler(http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- usr, ok := GetUser(w)
- if assertTrue(ok, "expected authenticated user, got false", t) {
- assertTrue(usr.(string) == "meyou", fmt.Sprintf("expected user data to be 'meyou', got '%s'", usr), t)
- }
- usr, ok = GetUserName(w)
- if assertTrue(ok, "expected authenticated user name, got false", t) {
- assertTrue(usr == "me", fmt.Sprintf("expected user name to be 'me', got '%s'", usr), t)
- }
- w.Header().Set("Content-Type", "text/plain")
- w.Write([]byte(usr.(string)))
- }), func(u, pwd string) (interface{}, bool) {
- if u == "me" && pwd == "you" {
- return u + pwd, true
- }
- return nil, false
- }, ""), nil)
-
- s := httptest.NewServer(h)
- defer s.Close()
-
- req, err := http.NewRequest("GET", "http://me:you@"+s.URL[7:], nil)
- if err != nil {
- panic(err)
- }
- req.Header.Set("Accept-Encoding", "gzip")
- res, err := http.DefaultClient.Do(req)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertGzippedBody([]byte("me"), res, t)
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/chain.go b/vendor/github.com/PuerkitoBio/ghost/handlers/chain.go
deleted file mode 100644
index e3ae5dea1..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/chain.go
+++ /dev/null
@@ -1,63 +0,0 @@
-package handlers
-
-import (
- "net/http"
-)
-
-// ChainableHandler is a valid Handler interface, and adds the possibility to
-// chain other handlers.
-type ChainableHandler interface {
- http.Handler
- Chain(http.Handler) ChainableHandler
- ChainFunc(http.HandlerFunc) ChainableHandler
-}
-
-// Default implementation of a simple ChainableHandler
-type chainHandler struct {
- http.Handler
-}
-
-func (this *chainHandler) ChainFunc(h http.HandlerFunc) ChainableHandler {
- return this.Chain(h)
-}
-
-// Implementation of the ChainableHandler interface, calls the chained handler
-// after the current one (sequential).
-func (this *chainHandler) Chain(h http.Handler) ChainableHandler {
- return &chainHandler{
- http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- // Add the chained handler after the call to this handler
- this.ServeHTTP(w, r)
- h.ServeHTTP(w, r)
- }),
- }
-}
-
-// Convert a standard http handler to a chainable handler interface.
-func NewChainableHandler(h http.Handler) ChainableHandler {
- return &chainHandler{
- h,
- }
-}
-
-// Helper function to chain multiple handler functions in a single call.
-func ChainHandlerFuncs(h ...http.HandlerFunc) ChainableHandler {
- return &chainHandler{
- http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- for _, v := range h {
- v(w, r)
- }
- }),
- }
-}
-
-// Helper function to chain multiple handlers in a single call.
-func ChainHandlers(h ...http.Handler) ChainableHandler {
- return &chainHandler{
- http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- for _, v := range h {
- v.ServeHTTP(w, r)
- }
- }),
- }
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/chain_test.go b/vendor/github.com/PuerkitoBio/ghost/handlers/chain_test.go
deleted file mode 100644
index b788c526b..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/chain_test.go
+++ /dev/null
@@ -1,73 +0,0 @@
-package handlers
-
-import (
- "bytes"
- "net/http"
- "testing"
-)
-
-func TestChaining(t *testing.T) {
- var buf bytes.Buffer
-
- a := func(w http.ResponseWriter, r *http.Request) {
- buf.WriteRune('a')
- }
- b := func(w http.ResponseWriter, r *http.Request) {
- buf.WriteRune('b')
- }
- c := func(w http.ResponseWriter, r *http.Request) {
- buf.WriteRune('c')
- }
- f := NewChainableHandler(http.HandlerFunc(a)).Chain(http.HandlerFunc(b)).Chain(http.HandlerFunc(c))
- f.ServeHTTP(nil, nil)
-
- if buf.String() != "abc" {
- t.Errorf("expected 'abc', got %s", buf.String())
- }
-}
-
-func TestChainingWithHelperFunc(t *testing.T) {
- var buf bytes.Buffer
-
- a := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- buf.WriteRune('a')
- })
- b := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- buf.WriteRune('b')
- })
- c := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- buf.WriteRune('c')
- })
- d := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- buf.WriteRune('d')
- })
- f := ChainHandlers(a, b, c, d)
- f.ServeHTTP(nil, nil)
-
- if buf.String() != "abcd" {
- t.Errorf("expected 'abcd', got %s", buf.String())
- }
-}
-
-func TestChainingMixed(t *testing.T) {
- var buf bytes.Buffer
-
- a := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- buf.WriteRune('a')
- })
- b := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- buf.WriteRune('b')
- })
- c := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- buf.WriteRune('c')
- })
- d := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- buf.WriteRune('d')
- })
- f := NewChainableHandler(a).Chain(ChainHandlers(b, c)).Chain(d)
- f.ServeHTTP(nil, nil)
-
- if buf.String() != "abcd" {
- t.Errorf("expected 'abcd', got %s", buf.String())
- }
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/context.go b/vendor/github.com/PuerkitoBio/ghost/handlers/context.go
deleted file mode 100644
index ccac8c302..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/context.go
+++ /dev/null
@@ -1,55 +0,0 @@
-package handlers
-
-import (
- "net/http"
-)
-
-// Structure that holds the context map and exposes the ResponseWriter interface.
-type contextResponseWriter struct {
- http.ResponseWriter
- m map[interface{}]interface{}
-}
-
-// Implement the WrapWriter interface.
-func (this *contextResponseWriter) WrappedWriter() http.ResponseWriter {
- return this.ResponseWriter
-}
-
-// ContextHandlerFunc is the same as ContextHandler, it is just a convenience
-// signature that accepts a func(http.ResponseWriter, *http.Request) instead of
-// a http.Handler interface. It saves the boilerplate http.HandlerFunc() cast.
-func ContextHandlerFunc(h http.HandlerFunc, cap int) http.HandlerFunc {
- return ContextHandler(h, cap)
-}
-
-// ContextHandler gives a context storage that lives only for the duration of
-// the request, with no locking involved.
-func ContextHandler(h http.Handler, cap int) http.HandlerFunc {
- return func(w http.ResponseWriter, r *http.Request) {
- if _, ok := GetContext(w); ok {
- // Self-awareness, context handler is already set up
- h.ServeHTTP(w, r)
- return
- }
-
- // Create the context-providing ResponseWriter replacement.
- ctxw := &contextResponseWriter{
- w,
- make(map[interface{}]interface{}, cap),
- }
- // Call the wrapped handler with the context-aware writer
- h.ServeHTTP(ctxw, r)
- }
-}
-
-// Helper function to retrieve the context map from the ResponseWriter interface.
-func GetContext(w http.ResponseWriter) (map[interface{}]interface{}, bool) {
- ctxw, ok := GetResponseWriter(w, func(tst http.ResponseWriter) bool {
- _, ok := tst.(*contextResponseWriter)
- return ok
- })
- if ok {
- return ctxw.(*contextResponseWriter).m, true
- }
- return nil, false
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/context_test.go b/vendor/github.com/PuerkitoBio/ghost/handlers/context_test.go
deleted file mode 100644
index 3ef3f4009..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/context_test.go
+++ /dev/null
@@ -1,83 +0,0 @@
-package handlers
-
-import (
- "fmt"
- "net/http"
- "net/http/httptest"
- "testing"
-)
-
-func TestContext(t *testing.T) {
- key := "key"
- val := 10
- body := "this is the output"
-
- h2 := wrappedHandler(t, key, val, body)
- // Create the context handler with a wrapped handler
- h := ContextHandler(http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- ctx, _ := GetContext(w)
- assertTrue(ctx != nil, "expected context to be non-nil", t)
- assertTrue(len(ctx) == 0, fmt.Sprintf("expected context to be empty, got %d", len(ctx)), t)
- ctx[key] = val
- h2.ServeHTTP(w, r)
- }), 2)
- s := httptest.NewServer(h)
- defer s.Close()
-
- // First call
- res, err := http.DefaultClient.Get(s.URL)
- if err != nil {
- panic(err)
- }
- res.Body.Close()
- // Second call, context should be cleaned at start
- res, err = http.DefaultClient.Get(s.URL)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertBody([]byte(body), res, t)
-}
-
-func TestWrappedContext(t *testing.T) {
- key := "key"
- val := 10
- body := "this is the output"
-
- h2 := wrappedHandler(t, key, val, body)
- h := ContextHandler(LogHandler(http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- ctx, _ := GetContext(w)
- if !assertTrue(ctx != nil, "expected context to be non-nil", t) {
- panic("ctx is nil")
- }
- assertTrue(len(ctx) == 0, fmt.Sprintf("expected context to be empty, got %d", len(ctx)), t)
- ctx[key] = val
- h2.ServeHTTP(w, r)
- }), NewLogOptions(nil, "%s", "url")), 2)
- s := httptest.NewServer(h)
- defer s.Close()
-
- res, err := http.DefaultClient.Get(s.URL)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertBody([]byte(body), res, t)
-}
-
-func wrappedHandler(t *testing.T, k, v interface{}, body string) http.Handler {
- return http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- ctx, _ := GetContext(w)
- ac := ctx[k]
- assertTrue(ac == v, fmt.Sprintf("expected value to be %v, got %v", v, ac), t)
-
- // Actually write something
- _, err := w.Write([]byte(body))
- if err != nil {
- panic(err)
- }
- })
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/doc.go b/vendor/github.com/PuerkitoBio/ghost/handlers/doc.go
deleted file mode 100644
index 642c2991a..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/doc.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Package handlers define reusable handler components that focus on offering
-// a single well-defined feature. Note that any http.Handler implementation
-// can be used with Ghost's chainable or wrappable handlers design.
-//
-// Go's standard library provides a number of such useful handlers in net/http:
-//
-// - FileServer(http.FileSystem)
-// - NotFoundHandler()
-// - RedirectHandler(string, int)
-// - StripPrefix(string, http.Handler)
-// - TimeoutHandler(http.Handler, time.Duration, string)
-//
-// This package adds the following list of handlers:
-//
-// - BasicAuthHandler(http.Handler, func(string, string) (interface{}, bool), string)
-// a Basic Authentication handler.
-// - ContextHandler(http.Handler, int) : a volatile storage map valid only
-// for the duration of the request, with no locking required.
-// - FaviconHandler(http.Handler, string, time.Duration) : an efficient favicon
-// handler.
-// - GZIPHandler(http.Handler) : compress the content of the body if the client
-// accepts gzip compression.
-// - LogHandler(http.Handler, *LogOptions) : customizable request logger.
-// - PanicHandler(http.Handler) : handle panics gracefully so that the client
-// receives a response (status code 500).
-// - SessionHandler(http.Handler, *SessionOptions) : a cookie-based, store-agnostic
-// persistent session handler.
-// - StaticFileHandler(string) : serve the contents of a specific file.
-package handlers
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/favicon.go b/vendor/github.com/PuerkitoBio/ghost/handlers/favicon.go
deleted file mode 100644
index 0f460fa96..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/favicon.go
+++ /dev/null
@@ -1,71 +0,0 @@
-package handlers
-
-import (
- "crypto/md5"
- "io/ioutil"
- "net/http"
- "strconv"
- "time"
-
- "github.com/PuerkitoBio/ghost"
-)
-
-// FaviconHandlerFunc is the same as FaviconHandler, it is just a convenience
-// signature that accepts a func(http.ResponseWriter, *http.Request) instead of
-// a http.Handler interface. It saves the boilerplate http.HandlerFunc() cast.
-func FaviconHandlerFunc(h http.HandlerFunc, path string, maxAge time.Duration) http.HandlerFunc {
- return FaviconHandler(h, path, maxAge)
-}
-
-// Efficient favicon handler, mostly a port of node's Connect library implementation
-// of the favicon middleware.
-// https://github.com/senchalabs/connect
-func FaviconHandler(h http.Handler, path string, maxAge time.Duration) http.HandlerFunc {
- var buf []byte
- var hash string
-
- return func(w http.ResponseWriter, r *http.Request) {
- var err error
- if r.URL.Path == "/favicon.ico" {
- if buf == nil {
- // Read from file and cache
- ghost.LogFn("ghost.favicon : serving from %s", path)
- buf, err = ioutil.ReadFile(path)
- if err != nil {
- ghost.LogFn("ghost.favicon : error reading file : %s", err)
- http.NotFound(w, r)
- return
- }
- hash = hashContent(buf)
- }
- writeHeaders(w.Header(), buf, maxAge, hash)
- writeBody(w, r, buf)
- } else {
- h.ServeHTTP(w, r)
- }
- }
-}
-
-// Write the content of the favicon, or respond with a 404 not found
-// in case of error (hardly a critical error).
-func writeBody(w http.ResponseWriter, r *http.Request, buf []byte) {
- _, err := w.Write(buf)
- if err != nil {
- ghost.LogFn("ghost.favicon : error writing response : %s", err)
- http.NotFound(w, r)
- }
-}
-
-// Correctly set the http headers.
-func writeHeaders(hdr http.Header, buf []byte, maxAge time.Duration, hash string) {
- hdr.Set("Content-Type", "image/x-icon")
- hdr.Set("Content-Length", strconv.Itoa(len(buf)))
- hdr.Set("Etag", hash)
- hdr.Set("Cache-Control", "public, max-age="+strconv.Itoa(int(maxAge.Seconds())))
-}
-
-// Get the MD5 hash of the content.
-func hashContent(buf []byte) string {
- h := md5.New()
- return string(h.Sum(buf))
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/favicon_test.go b/vendor/github.com/PuerkitoBio/ghost/handlers/favicon_test.go
deleted file mode 100644
index b9d40679b..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/favicon_test.go
+++ /dev/null
@@ -1,72 +0,0 @@
-package handlers
-
-import (
- "net/http"
- "net/http/httptest"
- "os"
- "testing"
- "time"
-)
-
-func TestFavicon(t *testing.T) {
- s := httptest.NewServer(FaviconHandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- w.Write([]byte("ok"))
- }, "./testdata/favicon.ico", time.Second))
- defer s.Close()
-
- res, err := http.Get(s.URL + "/favicon.ico")
- if err != nil {
- panic(err)
- }
- defer res.Body.Close()
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertHeader("Content-Type", "image/x-icon", res, t)
- assertHeader("Cache-Control", "public, max-age=1", res, t)
- assertHeader("Content-Length", "1406", res, t)
-}
-
-func TestFaviconInvalidPath(t *testing.T) {
- s := httptest.NewServer(FaviconHandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- w.Write([]byte("ok"))
- }, "./testdata/xfavicon.ico", time.Second))
- defer s.Close()
-
- res, err := http.Get(s.URL + "/favicon.ico")
- if err != nil {
- panic(err)
- }
- defer res.Body.Close()
- assertStatus(http.StatusNotFound, res.StatusCode, t)
-}
-
-func TestFaviconFromCache(t *testing.T) {
- s := httptest.NewServer(FaviconHandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- w.Write([]byte("ok"))
- }, "./testdata/favicon.ico", time.Second))
- defer s.Close()
-
- res, err := http.Get(s.URL + "/favicon.ico")
- if err != nil {
- panic(err)
- }
- defer res.Body.Close()
-
- // Rename the file temporarily
- err = os.Rename("./testdata/favicon.ico", "./testdata/xfavicon.ico")
- if err != nil {
- panic(err)
- }
- defer os.Rename("./testdata/xfavicon.ico", "./testdata/favicon.ico")
- res, err = http.Get(s.URL + "/favicon.ico")
- if err != nil {
- panic(err)
- }
- defer res.Body.Close()
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertHeader("Content-Type", "image/x-icon", res, t)
- assertHeader("Cache-Control", "public, max-age=1", res, t)
- assertHeader("Content-Length", "1406", res, t)
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/ghost.go b/vendor/github.com/PuerkitoBio/ghost/handlers/ghost.go
deleted file mode 100644
index 2707e75f2..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/ghost.go
+++ /dev/null
@@ -1,75 +0,0 @@
-package handlers
-
-import (
- "net/http"
-)
-
-// Interface giving easy access to the most common augmented features.
-type GhostWriter interface {
- http.ResponseWriter
- UserName() string
- User() interface{}
- Context() map[interface{}]interface{}
- Session() *Session
-}
-
-// Internal implementation of the GhostWriter interface.
-type ghostWriter struct {
- http.ResponseWriter
- userName string
- user interface{}
- ctx map[interface{}]interface{}
- ssn *Session
-}
-
-func (this *ghostWriter) UserName() string {
- return this.userName
-}
-
-func (this *ghostWriter) User() interface{} {
- return this.user
-}
-
-func (this *ghostWriter) Context() map[interface{}]interface{} {
- return this.ctx
-}
-
-func (this *ghostWriter) Session() *Session {
- return this.ssn
-}
-
-// Convenience handler that wraps a custom function with direct access to the
-// authenticated user, context and session on the writer.
-func GhostHandlerFunc(h func(w GhostWriter, r *http.Request)) http.HandlerFunc {
- return func(w http.ResponseWriter, r *http.Request) {
- if gw, ok := getGhostWriter(w); ok {
- // Self-awareness
- h(gw, r)
- return
- }
- uid, _ := GetUserName(w)
- usr, _ := GetUser(w)
- ctx, _ := GetContext(w)
- ssn, _ := GetSession(w)
- gw := &ghostWriter{
- w,
- uid,
- usr,
- ctx,
- ssn,
- }
- h(gw, r)
- }
-}
-
-// Check the writer chain to find a ghostWriter.
-func getGhostWriter(w http.ResponseWriter) (*ghostWriter, bool) {
- gw, ok := GetResponseWriter(w, func(tst http.ResponseWriter) bool {
- _, ok := tst.(*ghostWriter)
- return ok
- })
- if ok {
- return gw.(*ghostWriter), true
- }
- return nil, false
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/gzip.go b/vendor/github.com/PuerkitoBio/ghost/handlers/gzip.go
deleted file mode 100644
index 0d772a859..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/gzip.go
+++ /dev/null
@@ -1,168 +0,0 @@
-package handlers
-
-import (
- "compress/gzip"
- "io"
- "net/http"
-)
-
-// Thanks to Andrew Gerrand for inspiration:
-// https://groups.google.com/d/msg/golang-nuts/eVnTcMwNVjM/4vYU8id9Q2UJ
-//
-// Also, node's Connect library implementation of the compress middleware:
-// https://github.com/senchalabs/connect/blob/master/lib/middleware/compress.js
-//
-// And StackOverflow's explanation of Vary: Accept-Encoding header:
-// http://stackoverflow.com/questions/7848796/what-does-varyaccept-encoding-mean
-
-// Internal gzipped writer that satisfies both the (body) writer in gzipped format,
-// and maintains the rest of the ResponseWriter interface for header manipulation.
-type gzipResponseWriter struct {
- io.Writer
- http.ResponseWriter
- r *http.Request // Keep a hold of the Request, for the filter function
- filtered bool // Has the request been run through the filter function?
- dogzip bool // Should we do GZIP compression for this request?
- filterFn func(http.ResponseWriter, *http.Request) bool
-}
-
-// Make sure the filter function is applied.
-func (w *gzipResponseWriter) applyFilter() {
- if !w.filtered {
- if w.dogzip = w.filterFn(w, w.r); w.dogzip {
- setGzipHeaders(w.Header())
- }
- w.filtered = true
- }
-}
-
-// Unambiguous Write() implementation (otherwise both ResponseWriter and Writer
-// want to claim this method).
-func (w *gzipResponseWriter) Write(b []byte) (int, error) {
- w.applyFilter()
- if w.dogzip {
- // Write compressed
- return w.Writer.Write(b)
- }
- // Write uncompressed
- return w.ResponseWriter.Write(b)
-}
-
-// Intercept the WriteHeader call to correctly set the GZIP headers.
-func (w *gzipResponseWriter) WriteHeader(code int) {
- w.applyFilter()
- w.ResponseWriter.WriteHeader(code)
-}
-
-// Implement WrapWriter interface
-func (w *gzipResponseWriter) WrappedWriter() http.ResponseWriter {
- return w.ResponseWriter
-}
-
-var (
- defaultFilterTypes = [...]string{
- "text",
- "javascript",
- "json",
- }
-)
-
-// Default filter to check if the response should be GZIPped.
-// By default, all text (html, css, xml, ...), javascript and json
-// content types are candidates for GZIP.
-func defaultFilter(w http.ResponseWriter, r *http.Request) bool {
- hdr := w.Header()
- for _, tp := range defaultFilterTypes {
- ok := HeaderMatch(hdr, "Content-Type", HmContains, tp)
- if ok {
- return true
- }
- }
- return false
-}
-
-// GZIPHandlerFunc is the same as GZIPHandler, it is just a convenience
-// signature that accepts a func(http.ResponseWriter, *http.Request) instead of
-// a http.Handler interface. It saves the boilerplate http.HandlerFunc() cast.
-func GZIPHandlerFunc(h http.HandlerFunc, filterFn func(http.ResponseWriter, *http.Request) bool) http.HandlerFunc {
- return GZIPHandler(h, filterFn)
-}
-
-// Gzip compression HTTP handler. If the client supports it, it compresses the response
-// written by the wrapped handler. The filter function is called when the response is about
-// to be written to determine if compression should be applied. If this argument is nil,
-// the default filter will GZIP only content types containing /json|text|javascript/.
-func GZIPHandler(h http.Handler, filterFn func(http.ResponseWriter, *http.Request) bool) http.HandlerFunc {
- if filterFn == nil {
- filterFn = defaultFilter
- }
- return func(w http.ResponseWriter, r *http.Request) {
- if _, ok := getGzipWriter(w); ok {
- // Self-awareness, gzip handler is already set up
- h.ServeHTTP(w, r)
- return
- }
- hdr := w.Header()
- setVaryHeader(hdr)
-
- // Do nothing on a HEAD request
- if r.Method == "HEAD" {
- h.ServeHTTP(w, r)
- return
- }
- if !acceptsGzip(r.Header) {
- // No gzip support from the client, return uncompressed
- h.ServeHTTP(w, r)
- return
- }
-
- // Prepare a gzip response container
- gz := gzip.NewWriter(w)
- gzw := &gzipResponseWriter{
- Writer: gz,
- ResponseWriter: w,
- r: r,
- filterFn: filterFn,
- }
- h.ServeHTTP(gzw, r)
- // Iff the handler completed successfully (no panic) and GZIP was indeed used, close the gzip writer,
- // which seems to generate a Write to the underlying writer.
- if gzw.dogzip {
- gz.Close()
- }
- }
-}
-
-// Add the vary by "accept-encoding" header if it is not already set.
-func setVaryHeader(hdr http.Header) {
- if !HeaderMatch(hdr, "Vary", HmContains, "accept-encoding") {
- hdr.Add("Vary", "Accept-Encoding")
- }
-}
-
-// Checks if the client accepts GZIP-encoded responses.
-func acceptsGzip(hdr http.Header) bool {
- ok := HeaderMatch(hdr, "Accept-Encoding", HmContains, "gzip")
- if !ok {
- ok = HeaderMatch(hdr, "Accept-Encoding", HmEquals, "*")
- }
- return ok
-}
-
-func setGzipHeaders(hdr http.Header) {
- // The content-type will be explicitly set somewhere down the path of handlers
- hdr.Set("Content-Encoding", "gzip")
- hdr.Del("Content-Length")
-}
-
-// Helper function to retrieve the gzip writer.
-func getGzipWriter(w http.ResponseWriter) (*gzipResponseWriter, bool) {
- gz, ok := GetResponseWriter(w, func(tst http.ResponseWriter) bool {
- _, ok := tst.(*gzipResponseWriter)
- return ok
- })
- if ok {
- return gz.(*gzipResponseWriter), true
- }
- return nil, false
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/gzip_test.go b/vendor/github.com/PuerkitoBio/ghost/handlers/gzip_test.go
deleted file mode 100644
index 94cdbd1ba..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/gzip_test.go
+++ /dev/null
@@ -1,178 +0,0 @@
-package handlers
-
-import (
- "net/http"
- "net/http/httptest"
- "testing"
-)
-
-func TestGzipped(t *testing.T) {
- body := "This is the body"
- headers := []string{"gzip", "*", "gzip, deflate, sdch"}
-
- h := GZIPHandler(http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- w.Header().Set("Content-Type", "text/plain")
- _, err := w.Write([]byte(body))
- if err != nil {
- panic(err)
- }
- }), nil)
- s := httptest.NewServer(h)
- defer s.Close()
-
- for _, hdr := range headers {
- t.Logf("running with Accept-Encoding header %s", hdr)
- req, err := http.NewRequest("GET", s.URL, nil)
- if err != nil {
- panic(err)
- }
- req.Header.Set("Accept-Encoding", hdr)
- res, err := http.DefaultClient.Do(req)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertHeader("Content-Encoding", "gzip", res, t)
- assertGzippedBody([]byte(body), res, t)
- }
-}
-
-func TestNoGzip(t *testing.T) {
- body := "This is the body"
-
- h := GZIPHandler(http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- w.Header().Set("Content-Type", "text/plain")
- _, err := w.Write([]byte(body))
- if err != nil {
- panic(err)
- }
- }), nil)
- s := httptest.NewServer(h)
- defer s.Close()
-
- req, err := http.NewRequest("GET", s.URL, nil)
- if err != nil {
- panic(err)
- }
- res, err := http.DefaultClient.Do(req)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertHeader("Content-Encoding", "", res, t)
- assertBody([]byte(body), res, t)
-}
-
-func TestGzipOuterPanic(t *testing.T) {
- msg := "ko"
-
- h := PanicHandler(
- GZIPHandler(http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- panic(msg)
- }), nil), nil)
- s := httptest.NewServer(h)
- defer s.Close()
-
- req, err := http.NewRequest("GET", s.URL, nil)
- if err != nil {
- panic(err)
- }
- res, err := http.DefaultClient.Do(req)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusInternalServerError, res.StatusCode, t)
- assertHeader("Content-Encoding", "", res, t)
- assertBody([]byte(msg+"\n"), res, t)
-}
-
-func TestNoGzipOnFilter(t *testing.T) {
- body := "This is the body"
-
- h := GZIPHandler(http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- w.Header().Set("Content-Type", "x/x")
- _, err := w.Write([]byte(body))
- if err != nil {
- panic(err)
- }
- }), nil)
- s := httptest.NewServer(h)
- defer s.Close()
-
- req, err := http.NewRequest("GET", s.URL, nil)
- if err != nil {
- panic(err)
- }
- req.Header.Set("Accept-Encoding", "gzip")
- res, err := http.DefaultClient.Do(req)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertHeader("Content-Encoding", "", res, t)
- assertBody([]byte(body), res, t)
-}
-
-func TestNoGzipOnCustomFilter(t *testing.T) {
- body := "This is the body"
-
- h := GZIPHandler(http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- w.Header().Set("Content-Type", "text/plain")
- _, err := w.Write([]byte(body))
- if err != nil {
- panic(err)
- }
- }), func(w http.ResponseWriter, r *http.Request) bool {
- return false
- })
- s := httptest.NewServer(h)
- defer s.Close()
-
- req, err := http.NewRequest("GET", s.URL, nil)
- if err != nil {
- panic(err)
- }
- req.Header.Set("Accept-Encoding", "gzip")
- res, err := http.DefaultClient.Do(req)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertHeader("Content-Encoding", "", res, t)
- assertBody([]byte(body), res, t)
-}
-
-func TestGzipOnCustomFilter(t *testing.T) {
- body := "This is the body"
-
- h := GZIPHandler(http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- w.Header().Set("Content-Type", "x/x")
- _, err := w.Write([]byte(body))
- if err != nil {
- panic(err)
- }
- }), func(w http.ResponseWriter, r *http.Request) bool {
- return true
- })
- s := httptest.NewServer(h)
- defer s.Close()
-
- req, err := http.NewRequest("GET", s.URL, nil)
- if err != nil {
- panic(err)
- }
- req.Header.Set("Accept-Encoding", "gzip")
- res, err := http.DefaultClient.Do(req)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertHeader("Content-Encoding", "gzip", res, t)
- assertGzippedBody([]byte(body), res, t)
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/header.go b/vendor/github.com/PuerkitoBio/ghost/handlers/header.go
deleted file mode 100644
index f015bfca6..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/header.go
+++ /dev/null
@@ -1,50 +0,0 @@
-package handlers
-
-import (
- "net/http"
- "strings"
-)
-
-// Kind of match to apply to the header check.
-type HeaderMatchType int
-
-const (
- HmEquals HeaderMatchType = iota
- HmStartsWith
- HmEndsWith
- HmContains
-)
-
-// Check if the specified header matches the test string, applying the header match type
-// specified.
-func HeaderMatch(hdr http.Header, nm string, matchType HeaderMatchType, test string) bool {
- // First get the header value
- val := hdr[http.CanonicalHeaderKey(nm)]
- if len(val) == 0 {
- return false
- }
- // Prepare the match test
- test = strings.ToLower(test)
- for _, v := range val {
- v = strings.Trim(strings.ToLower(v), " \n\t")
- switch matchType {
- case HmEquals:
- if v == test {
- return true
- }
- case HmStartsWith:
- if strings.HasPrefix(v, test) {
- return true
- }
- case HmEndsWith:
- if strings.HasSuffix(v, test) {
- return true
- }
- case HmContains:
- if strings.Contains(v, test) {
- return true
- }
- }
- }
- return false
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/log.go b/vendor/github.com/PuerkitoBio/ghost/handlers/log.go
deleted file mode 100644
index 5a43a7171..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/log.go
+++ /dev/null
@@ -1,231 +0,0 @@
-package handlers
-
-// Inspired by node's Connect library implementation of the logging middleware
-// https://github.com/senchalabs/connect
-
-import (
- "fmt"
- "net/http"
- "regexp"
- "strings"
- "time"
-
- "github.com/PuerkitoBio/ghost"
-)
-
-const (
- // Predefined logging formats that can be passed as format string.
- Ldefault = "_default_"
- Lshort = "_short_"
- Ltiny = "_tiny_"
-)
-
-var (
- // Token parser for request and response headers
- rxHeaders = regexp.MustCompile(`^(req|res)\[([^\]]+)\]$`)
-
- // Lookup table for predefined formats
- predefFormats = map[string]struct {
- fmt string
- toks []string
- }{
- Ldefault: {
- `%s - - [%s] "%s %s HTTP/%s" %d %s "%s" "%s"`,
- []string{"remote-addr", "date", "method", "url", "http-version", "status", "res[Content-Length]", "referrer", "user-agent"},
- },
- Lshort: {
- `%s - %s %s HTTP/%s %d %s - %.3f s`,
- []string{"remote-addr", "method", "url", "http-version", "status", "res[Content-Length]", "response-time"},
- },
- Ltiny: {
- `%s %s %d %s - %.3f s`,
- []string{"method", "url", "status", "res[Content-Length]", "response-time"},
- },
- }
-)
-
-// Augmented ResponseWriter implementation that captures the status code for the logger.
-type statusResponseWriter struct {
- http.ResponseWriter
- code int
- oriURL string
-}
-
-// Intercept the WriteHeader call to save the status code.
-func (this *statusResponseWriter) WriteHeader(code int) {
- this.code = code
- this.ResponseWriter.WriteHeader(code)
-}
-
-// Intercept the Write call to save the default status code.
-func (this *statusResponseWriter) Write(data []byte) (int, error) {
- if this.code == 0 {
- this.code = http.StatusOK
- }
- return this.ResponseWriter.Write(data)
-}
-
-// Implement the WrapWriter interface.
-func (this *statusResponseWriter) WrappedWriter() http.ResponseWriter {
- return this.ResponseWriter
-}
-
-// LogHandler options
-type LogOptions struct {
- LogFn func(string, ...interface{}) // Defaults to ghost.LogFn if nil
- Format string
- Tokens []string
- CustomTokens map[string]func(http.ResponseWriter, *http.Request) string
- Immediate bool
- DateFormat string
-}
-
-// Create a new LogOptions struct. The DateFormat defaults to time.RFC3339.
-func NewLogOptions(l func(string, ...interface{}), ft string, tok ...string) *LogOptions {
- return &LogOptions{
- LogFn: l,
- Format: ft,
- Tokens: tok,
- CustomTokens: make(map[string]func(http.ResponseWriter, *http.Request) string),
- DateFormat: time.RFC3339,
- }
-}
-
-// LogHandlerFunc is the same as LogHandler, it is just a convenience
-// signature that accepts a func(http.ResponseWriter, *http.Request) instead of
-// a http.Handler interface. It saves the boilerplate http.HandlerFunc() cast.
-func LogHandlerFunc(h http.HandlerFunc, opts *LogOptions) http.HandlerFunc {
- return LogHandler(h, opts)
-}
-
-// Create a log handler for every request it receives.
-func LogHandler(h http.Handler, opts *LogOptions) http.HandlerFunc {
- return func(w http.ResponseWriter, r *http.Request) {
- if _, ok := getStatusWriter(w); ok {
- // Self-awareness, logging handler already set up
- h.ServeHTTP(w, r)
- return
- }
-
- // Save the response start time
- st := time.Now()
- // Call the wrapped handler, with the augmented ResponseWriter to handle the status code
- stw := &statusResponseWriter{w, 0, ""}
-
- // Log immediately if requested, otherwise on exit
- if opts.Immediate {
- logRequest(stw, r, st, opts)
- } else {
- // Store original URL, may get modified by handlers (i.e. StripPrefix)
- stw.oriURL = r.URL.String()
- defer logRequest(stw, r, st, opts)
- }
- h.ServeHTTP(stw, r)
- }
-}
-
-func getIpAddress(r *http.Request) string {
- hdr := r.Header
- hdrRealIp := hdr.Get("X-Real-Ip")
- hdrForwardedFor := hdr.Get("X-Forwarded-For")
- if hdrRealIp == "" && hdrForwardedFor == "" {
- return r.RemoteAddr
- }
- if hdrForwardedFor != "" {
- // X-Forwarded-For is potentially a list of addresses separated with ","
- part := strings.Split(hdrForwardedFor, ",")[0]
- return strings.TrimSpace(part) + ":0"
- }
- return hdrRealIp
-}
-
-// Check if the specified token is a predefined one, and if so return its current value.
-func getPredefinedTokenValue(t string, w *statusResponseWriter, r *http.Request,
- st time.Time, opts *LogOptions) (interface{}, bool) {
-
- switch t {
- case "http-version":
- return fmt.Sprintf("%d.%d", r.ProtoMajor, r.ProtoMinor), true
- case "response-time":
- return time.Now().Sub(st).Seconds(), true
- case "remote-addr":
- return getIpAddress(r), true
- case "date":
- return time.Now().Format(opts.DateFormat), true
- case "method":
- return r.Method, true
- case "url":
- if w.oriURL != "" {
- return w.oriURL, true
- }
- return r.URL.String(), true
- case "referrer", "referer":
- return r.Referer(), true
- case "user-agent":
- return r.UserAgent(), true
- case "status":
- return w.code, true
- }
-
- // Handle special cases for header
- mtch := rxHeaders.FindStringSubmatch(t)
- if len(mtch) > 2 {
- if mtch[1] == "req" {
- return r.Header.Get(mtch[2]), true
- } else {
- // This only works for headers explicitly set via the Header() map of
- // the writer, not those added by the http package under the covers.
- return w.Header().Get(mtch[2]), true
- }
- }
- return nil, false
-}
-
-// Do the actual logging.
-func logRequest(w *statusResponseWriter, r *http.Request, st time.Time, opts *LogOptions) {
- var (
- fn func(string, ...interface{})
- ok bool
- format string
- toks []string
- )
-
- // If no specific log function, use the default one from the ghost package
- if opts.LogFn == nil {
- fn = ghost.LogFn
- } else {
- fn = opts.LogFn
- }
-
- // If this is a predefined format, use it instead
- if v, ok := predefFormats[opts.Format]; ok {
- format = v.fmt
- toks = v.toks
- } else {
- format = opts.Format
- toks = opts.Tokens
- }
- args := make([]interface{}, len(toks))
- for i, t := range toks {
- if args[i], ok = getPredefinedTokenValue(t, w, r, st, opts); !ok {
- if f, ok := opts.CustomTokens[t]; ok && f != nil {
- args[i] = f(w, r)
- } else {
- args[i] = "?"
- }
- }
- }
- fn(format, args...)
-}
-
-// Helper function to retrieve the status writer.
-func getStatusWriter(w http.ResponseWriter) (*statusResponseWriter, bool) {
- st, ok := GetResponseWriter(w, func(tst http.ResponseWriter) bool {
- _, ok := tst.(*statusResponseWriter)
- return ok
- })
- if ok {
- return st.(*statusResponseWriter), true
- }
- return nil, false
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/log_test.go b/vendor/github.com/PuerkitoBio/ghost/handlers/log_test.go
deleted file mode 100644
index 8bd213fc8..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/log_test.go
+++ /dev/null
@@ -1,217 +0,0 @@
-package handlers
-
-import (
- "bytes"
- "fmt"
- "log"
- "net/http"
- "net/http/httptest"
- "regexp"
- "testing"
- "time"
-)
-
-type testCase struct {
- tok string
- fmt string
- rx *regexp.Regexp
-}
-
-func TestLog(t *testing.T) {
- log.SetFlags(0)
- now := time.Now()
-
- formats := []testCase{
- testCase{"remote-addr",
- "%s",
- regexp.MustCompile(`^127\.0\.0\.1:\d+\n$`),
- },
- testCase{"date",
- "%s",
- regexp.MustCompile(`^` + fmt.Sprintf("%04d-%02d-%02d", now.Year(), now.Month(), now.Day()) + `\n$`),
- },
- testCase{"method",
- "%s",
- regexp.MustCompile(`^GET\n$`),
- },
- testCase{"url",
- "%s",
- regexp.MustCompile(`^/\n$`),
- },
- testCase{"http-version",
- "%s",
- regexp.MustCompile(`^1\.1\n$`),
- },
- testCase{"status",
- "%d",
- regexp.MustCompile(`^200\n$`),
- },
- testCase{"referer",
- "%s",
- regexp.MustCompile(`^http://www\.test\.com\n$`),
- },
- testCase{"referrer",
- "%s",
- regexp.MustCompile(`^http://www\.test\.com\n$`),
- },
- testCase{"user-agent",
- "%s",
- regexp.MustCompile(`^Go \d+\.\d+ package http\n$`),
- },
- testCase{"bidon",
- "%s",
- regexp.MustCompile(`^\?\n$`),
- },
- testCase{"response-time",
- "%.3f",
- regexp.MustCompile(`^0\.1\d\d\n$`),
- },
- testCase{"req[Accept-Encoding]",
- "%s",
- regexp.MustCompile(`^gzip\n$`),
- },
- testCase{"res[blah]",
- "%s",
- regexp.MustCompile(`^$`),
- },
- testCase{"tiny",
- Ltiny,
- regexp.MustCompile(`^GET / 200 - 0\.1\d\d s\n$`),
- },
- testCase{"short",
- Lshort,
- regexp.MustCompile(`^127\.0\.0\.1:\d+ - GET / HTTP/1\.1 200 - 0\.1\d\d s\n$`),
- },
- testCase{"default",
- Ldefault,
- regexp.MustCompile(`^127\.0\.0\.1:\d+ - - \[\d{4}-\d{2}-\d{2}\] "GET / HTTP/1\.1" 200 "http://www\.test\.com" "Go \d+\.\d+ package http"\n$`),
- },
- testCase{"res[Content-Type]",
- "%s",
- regexp.MustCompile(`^text/plain\n$`),
- },
- }
- for _, tc := range formats {
- testLogCase(tc, t)
- }
-}
-
-func testLogCase(tc testCase, t *testing.T) {
- buf := bytes.NewBuffer(nil)
- log.SetOutput(buf)
- opts := NewLogOptions(log.Printf, tc.fmt, tc.tok)
- opts.DateFormat = "2006-01-02"
- h := LogHandler(http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- time.Sleep(100 * time.Millisecond)
- w.Header().Set("Content-Type", "text/plain")
- w.WriteHeader(200)
- w.Write([]byte("body"))
- }), opts)
-
- s := httptest.NewServer(h)
- defer s.Close()
- t.Logf("running %s...", tc.tok)
- req, err := http.NewRequest("GET", s.URL, nil)
- if err != nil {
- panic(err)
- }
- req.Header.Set("Referer", "http://www.test.com")
- req.Header.Set("Accept-Encoding", "gzip")
- res, err := http.DefaultClient.Do(req)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusOK, res.StatusCode, t)
- ac := buf.String()
- assertTrue(tc.rx.MatchString(ac), fmt.Sprintf("expected log to match '%s', got '%s'", tc.rx.String(), ac), t)
-}
-
-func TestForwardedFor(t *testing.T) {
- rx := regexp.MustCompile(`^1\.1\.1\.1:0 - - \[\d{4}-\d{2}-\d{2}\] "GET / HTTP/1\.1" 200 "http://www\.test\.com" "Go \d+\.\d+ package http"\n$`)
-
- buf := bytes.NewBuffer(nil)
- log.SetOutput(buf)
- opts := NewLogOptions(log.Printf, Ldefault)
- opts.DateFormat = "2006-01-02"
-
- h := LogHandler(http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- time.Sleep(100 * time.Millisecond)
- w.Header().Set("Content-Type", "text/plain")
- w.WriteHeader(200)
- w.Write([]byte("body"))
- }), opts)
-
- s := httptest.NewServer(h)
- defer s.Close()
- t.Logf("running ForwardedFor...")
- req, err := http.NewRequest("GET", s.URL, nil)
- if err != nil {
- panic(err)
- }
- req.Header.Set("Referer", "http://www.test.com")
- req.Header.Set("X-Forwarded-For", "1.1.1.1")
- req.Header.Set("Accept-Encoding", "gzip")
- res, err := http.DefaultClient.Do(req)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusOK, res.StatusCode, t)
- ac := buf.String()
- assertTrue(rx.MatchString(ac), fmt.Sprintf("expected log to match '%s', got '%s'", rx.String(), ac), t)
-}
-
-func TestImmediate(t *testing.T) {
- buf := bytes.NewBuffer(nil)
- log.SetFlags(0)
- log.SetOutput(buf)
- opts := NewLogOptions(nil, Ltiny)
- opts.Immediate = true
- h := LogHandler(http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- time.Sleep(100 * time.Millisecond)
- w.WriteHeader(200)
- w.Write([]byte("body"))
- }), opts)
- s := httptest.NewServer(h)
- defer s.Close()
-
- res, err := http.Get(s.URL)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusOK, res.StatusCode, t)
- ac := buf.String()
- // Since it is Immediate logging, status is still 0 and response time is less than 100ms
- rx := regexp.MustCompile(`GET / 0 - 0\.0\d\d s\n`)
- assertTrue(rx.MatchString(ac), fmt.Sprintf("expected log to match '%s', got '%s'", rx.String(), ac), t)
-}
-
-func TestCustom(t *testing.T) {
- buf := bytes.NewBuffer(nil)
- log.SetFlags(0)
- log.SetOutput(buf)
- opts := NewLogOptions(nil, "%s %s", "method", "custom")
- opts.CustomTokens["custom"] = func(w http.ResponseWriter, r *http.Request) string {
- return "toto"
- }
-
- h := LogHandler(http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- time.Sleep(100 * time.Millisecond)
- w.WriteHeader(200)
- w.Write([]byte("body"))
- }), opts)
- s := httptest.NewServer(h)
- defer s.Close()
-
- res, err := http.Get(s.URL)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusOK, res.StatusCode, t)
- ac := buf.String()
- rx := regexp.MustCompile(`GET toto`)
- assertTrue(rx.MatchString(ac), fmt.Sprintf("expected log to match '%s', got '%s'", rx.String(), ac), t)
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/panic.go b/vendor/github.com/PuerkitoBio/ghost/handlers/panic.go
deleted file mode 100644
index e1362c22b..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/panic.go
+++ /dev/null
@@ -1,57 +0,0 @@
-package handlers
-
-import (
- "fmt"
- "net/http"
-)
-
-// Augmented response writer to hold the panic data (can be anything, not necessarily an error
-// interface).
-type errResponseWriter struct {
- http.ResponseWriter
- perr interface{}
-}
-
-// Implement the WrapWriter interface.
-func (this *errResponseWriter) WrappedWriter() http.ResponseWriter {
- return this.ResponseWriter
-}
-
-// PanicHandlerFunc is the same as PanicHandler, it is just a convenience
-// signature that accepts a func(http.ResponseWriter, *http.Request) instead of
-// a http.Handler interface. It saves the boilerplate http.HandlerFunc() cast.
-func PanicHandlerFunc(h http.HandlerFunc, errH http.HandlerFunc) http.HandlerFunc {
- return PanicHandler(h, errH)
-}
-
-// Calls the wrapped handler and on panic calls the specified error handler. If the error handler is nil,
-// responds with a 500 error message.
-func PanicHandler(h http.Handler, errH http.Handler) http.HandlerFunc {
- return func(w http.ResponseWriter, r *http.Request) {
- defer func() {
- if err := recover(); err != nil {
- if errH != nil {
- ew := &errResponseWriter{w, err}
- errH.ServeHTTP(ew, r)
- } else {
- http.Error(w, fmt.Sprintf("%s", err), http.StatusInternalServerError)
- }
- }
- }()
-
- // Call the protected handler
- h.ServeHTTP(w, r)
- }
-}
-
-// Helper function to retrieve the panic error, if any.
-func GetPanicError(w http.ResponseWriter) (interface{}, bool) {
- er, ok := GetResponseWriter(w, func(tst http.ResponseWriter) bool {
- _, ok := tst.(*errResponseWriter)
- return ok
- })
- if ok {
- return er.(*errResponseWriter).perr, true
- }
- return nil, false
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/panic_test.go b/vendor/github.com/PuerkitoBio/ghost/handlers/panic_test.go
deleted file mode 100644
index de54d0be9..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/panic_test.go
+++ /dev/null
@@ -1,62 +0,0 @@
-package handlers
-
-import (
- "net/http"
- "net/http/httptest"
- "testing"
-)
-
-func TestPanic(t *testing.T) {
- h := PanicHandler(http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- panic("test")
- }), nil)
- s := httptest.NewServer(h)
- defer s.Close()
-
- res, err := http.Get(s.URL)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusInternalServerError, res.StatusCode, t)
-}
-
-func TestNoPanic(t *testing.T) {
- h := PanicHandler(http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
-
- }), nil)
- s := httptest.NewServer(h)
- defer s.Close()
-
- res, err := http.Get(s.URL)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusOK, res.StatusCode, t)
-}
-
-func TestPanicCustom(t *testing.T) {
- h := PanicHandler(http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- panic("ok")
- }),
- http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- err, ok := GetPanicError(w)
- if !ok {
- panic("no panic error found")
- }
- w.WriteHeader(501)
- w.Write([]byte(err.(string)))
- }))
- s := httptest.NewServer(h)
- defer s.Close()
-
- res, err := http.Get(s.URL)
- if err != nil {
- panic(err)
- }
- assertStatus(501, res.StatusCode, t)
- assertBody([]byte("ok"), res, t)
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/redisstore.go b/vendor/github.com/PuerkitoBio/ghost/handlers/redisstore.go
deleted file mode 100644
index 2974e2205..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/redisstore.go
+++ /dev/null
@@ -1,135 +0,0 @@
-package handlers
-
-import (
- "encoding/json"
- "errors"
- "time"
-
- "github.com/garyburd/redigo/redis"
-)
-
-var (
- ErrNoKeyPrefix = errors.New("cannot get session keys without a key prefix")
-)
-
-type RedisStoreOptions struct {
- Network string
- Address string
- ConnectTimeout time.Duration
- ReadTimeout time.Duration
- WriteTimeout time.Duration
- Database int // Redis database to use for session keys
- KeyPrefix string // If set, keys will be KeyPrefix:SessionID (semicolon added)
- BrowserSessServerTTL time.Duration // Defaults to 2 days
-}
-
-type RedisStore struct {
- opts *RedisStoreOptions
- conn redis.Conn
-}
-
-// Create a redis session store with the specified options.
-func NewRedisStore(opts *RedisStoreOptions) *RedisStore {
- var err error
- rs := &RedisStore{opts, nil}
- rs.conn, err = redis.DialTimeout(opts.Network, opts.Address, opts.ConnectTimeout,
- opts.ReadTimeout, opts.WriteTimeout)
- if err != nil {
- panic(err)
- }
- return rs
-}
-
-// Get the session from the store.
-func (this *RedisStore) Get(id string) (*Session, error) {
- key := id
- if this.opts.KeyPrefix != "" {
- key = this.opts.KeyPrefix + ":" + id
- }
- b, err := redis.Bytes(this.conn.Do("GET", key))
- if err != nil {
- return nil, err
- }
- var sess Session
- err = json.Unmarshal(b, &sess)
- if err != nil {
- return nil, err
- }
- return &sess, nil
-}
-
-// Save the session into the store.
-func (this *RedisStore) Set(sess *Session) error {
- b, err := json.Marshal(sess)
- if err != nil {
- return err
- }
- key := sess.ID()
- if this.opts.KeyPrefix != "" {
- key = this.opts.KeyPrefix + ":" + sess.ID()
- }
- ttl := sess.MaxAge()
- if ttl == 0 {
- // Browser session, set to specified TTL
- ttl = this.opts.BrowserSessServerTTL
- if ttl == 0 {
- ttl = 2 * 24 * time.Hour // Default to 2 days
- }
- }
- _, err = this.conn.Do("SETEX", key, int(ttl.Seconds()), b)
- if err != nil {
- return err
- }
- return nil
-}
-
-// Delete the session from the store.
-func (this *RedisStore) Delete(id string) error {
- key := id
- if this.opts.KeyPrefix != "" {
- key = this.opts.KeyPrefix + ":" + id
- }
- _, err := this.conn.Do("DEL", key)
- if err != nil {
- return err
- }
- return nil
-}
-
-// Clear all sessions from the store. Requires the use of a key
-// prefix in the store options, otherwise the method refuses to delete all keys.
-func (this *RedisStore) Clear() error {
- vals, err := this.getSessionKeys()
- if err != nil {
- return err
- }
- if len(vals) > 0 {
- this.conn.Send("MULTI")
- for _, v := range vals {
- this.conn.Send("DEL", v)
- }
- _, err = this.conn.Do("EXEC")
- if err != nil {
- return err
- }
- }
- return nil
-}
-
-// Get the number of session keys in the store. Requires the use of a
-// key prefix in the store options, otherwise returns -1 (cannot tell
-// session keys from other keys).
-func (this *RedisStore) Len() int {
- vals, err := this.getSessionKeys()
- if err != nil {
- return -1
- }
- return len(vals)
-}
-
-func (this *RedisStore) getSessionKeys() ([]interface{}, error) {
- if this.opts.KeyPrefix != "" {
- return redis.Values(this.conn.Do("KEYS", this.opts.KeyPrefix+":*"))
- }
- return nil, ErrNoKeyPrefix
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/reswriter.go b/vendor/github.com/PuerkitoBio/ghost/handlers/reswriter.go
deleted file mode 100644
index 1ae6ad397..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/reswriter.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package handlers
-
-import (
- "net/http"
-)
-
-// This interface can be implemented by an augmented ResponseWriter, so that
-// it doesn't hide other augmented writers in the chain.
-type WrapWriter interface {
- http.ResponseWriter
- WrappedWriter() http.ResponseWriter
-}
-
-// Helper function to retrieve a specific ResponseWriter.
-func GetResponseWriter(w http.ResponseWriter,
- predicate func(http.ResponseWriter) bool) (http.ResponseWriter, bool) {
-
- for {
- // Check if this writer is the one we're looking for
- if w != nil && predicate(w) {
- return w, true
- }
- // If it is a WrapWriter, move back the chain of wrapped writers
- ww, ok := w.(WrapWriter)
- if !ok {
- return nil, false
- }
- w = ww.WrappedWriter()
- }
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/reswriter_test.go b/vendor/github.com/PuerkitoBio/ghost/handlers/reswriter_test.go
deleted file mode 100644
index 37db8b428..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/reswriter_test.go
+++ /dev/null
@@ -1,52 +0,0 @@
-package handlers
-
-import (
- "fmt"
- "net/http"
- "testing"
-)
-
-type baseWriter struct{}
-
-func (b *baseWriter) Write(data []byte) (int, error) { return 0, nil }
-func (b *baseWriter) WriteHeader(code int) {}
-func (b *baseWriter) Header() http.Header { return nil }
-
-func TestNilWriter(t *testing.T) {
- rw, ok := GetResponseWriter(nil, func(w http.ResponseWriter) bool {
- return true
- })
- assertTrue(rw == nil, "expected nil, got non-nil", t)
- assertTrue(!ok, "expected false, got true", t)
-}
-
-func TestBaseWriter(t *testing.T) {
- bw := &baseWriter{}
- rw, ok := GetResponseWriter(bw, func(w http.ResponseWriter) bool {
- return true
- })
- assertTrue(rw == bw, fmt.Sprintf("expected %#v, got %#v", bw, rw), t)
- assertTrue(ok, "expected true, got false", t)
-}
-
-func TestWrappedWriter(t *testing.T) {
- bw := &baseWriter{}
- ctx := &contextResponseWriter{bw, nil}
- rw, ok := GetResponseWriter(ctx, func(w http.ResponseWriter) bool {
- _, ok := w.(*baseWriter)
- return ok
- })
- assertTrue(rw == bw, fmt.Sprintf("expected %#v, got %#v", bw, rw), t)
- assertTrue(ok, "expected true, got false", t)
-}
-
-func TestWrappedNotFoundWriter(t *testing.T) {
- bw := &baseWriter{}
- ctx := &contextResponseWriter{bw, nil}
- rw, ok := GetResponseWriter(ctx, func(w http.ResponseWriter) bool {
- _, ok := w.(*statusResponseWriter)
- return ok
- })
- assertTrue(rw == nil, fmt.Sprintf("expected nil, got %#v", rw), t)
- assertTrue(!ok, "expected false, got true", t)
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/session.go b/vendor/github.com/PuerkitoBio/ghost/handlers/session.go
deleted file mode 100644
index fb96faa74..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/session.go
+++ /dev/null
@@ -1,321 +0,0 @@
-package handlers
-
-import (
- "encoding/json"
- "errors"
- "hash/crc32"
- "net/http"
- "strings"
- "time"
-
- "github.com/PuerkitoBio/ghost"
- "github.com/gorilla/securecookie"
- "github.com/nu7hatch/gouuid"
-)
-
-const defaultCookieName = "ghost.sid"
-
-var (
- ErrSessionSecretMissing = errors.New("session secret is missing")
- ErrNoSessionID = errors.New("session ID could not be generated")
-)
-
-// The Session holds the data map that persists for the duration of the session.
-// The information stored in this map should be marshalable for the target Session store
-// format (i.e. json, sql, gob, etc. depending on how the store persists the data).
-type Session struct {
- isNew bool // keep private, not saved to JSON, will be false once read from the store
- internalSession
-}
-
-// Use a separate private struct to hold the private fields of the Session,
-// although those fields are exposed (public). This is a trick to simplify
-// JSON encoding.
-type internalSession struct {
- Data map[string]interface{} // JSON cannot marshal a map[interface{}]interface{}
- ID string
- Created time.Time
- MaxAge time.Duration
-}
-
-// Create a new Session instance. It panics in the unlikely event that a new random ID cannot be generated.
-func newSession(maxAge int) *Session {
- uid, err := uuid.NewV4()
- if err != nil {
- panic(ErrNoSessionID)
- }
- return &Session{
- true, // is new
- internalSession{
- make(map[string]interface{}),
- uid.String(),
- time.Now(),
- time.Duration(maxAge) * time.Second,
- },
- }
-}
-
-// Gets the ID of the session.
-func (ø *Session) ID() string {
- return ø.internalSession.ID
-}
-
-// Get the max age duration
-func (ø *Session) MaxAge() time.Duration {
- return ø.internalSession.MaxAge
-}
-
-// Get the creation time of the session.
-func (ø *Session) Created() time.Time {
- return ø.internalSession.Created
-}
-
-// Is this a new Session (created by the current request)
-func (ø *Session) IsNew() bool {
- return ø.isNew
-}
-
-// TODO : Resets the max age property of the session to its original value (sliding expiration).
-func (ø *Session) resetMaxAge() {
-}
-
-// Marshal the session to JSON.
-func (ø *Session) MarshalJSON() ([]byte, error) {
- return json.Marshal(ø.internalSession)
-}
-
-// Unmarshal the JSON into the internal session struct.
-func (ø *Session) UnmarshalJSON(b []byte) error {
- return json.Unmarshal(b, &ø.internalSession)
-}
-
-// Options object for the session handler. It specified the Session store to use for
-// persistence, the template for the session cookie (name, path, maxage, etc.),
-// whether or not the proxy should be trusted to determine if the connection is secure,
-// and the required secret to sign the session cookie.
-type SessionOptions struct {
- Store SessionStore
- CookieTemplate http.Cookie
- TrustProxy bool
- Secret string
-}
-
-// Create a new SessionOptions struct, using default cookie and proxy values.
-func NewSessionOptions(store SessionStore, secret string) *SessionOptions {
- return &SessionOptions{
- Store: store,
- Secret: secret,
- }
-}
-
-// The augmented ResponseWriter struct for the session handler. It holds the current
-// Session object and Session store, as well as flags and function to send the actual
-// session cookie at the end of the request.
-type sessResponseWriter struct {
- http.ResponseWriter
- sess *Session
- sessStore SessionStore
- sessSent bool
- sendCookieFn func()
-}
-
-// Implement the WrapWriter interface.
-func (ø *sessResponseWriter) WrappedWriter() http.ResponseWriter {
- return ø.ResponseWriter
-}
-
-// Intercept the Write() method to add the Set-Cookie header before it's too late.
-func (ø *sessResponseWriter) Write(data []byte) (int, error) {
- if !ø.sessSent {
- ø.sendCookieFn()
- ø.sessSent = true
- }
- return ø.ResponseWriter.Write(data)
-}
-
-// Intercept the WriteHeader() method to add the Set-Cookie header before it's too late.
-func (ø *sessResponseWriter) WriteHeader(code int) {
- if !ø.sessSent {
- ø.sendCookieFn()
- ø.sessSent = true
- }
- ø.ResponseWriter.WriteHeader(code)
-}
-
-// SessionHandlerFunc is the same as SessionHandler, it is just a convenience
-// signature that accepts a func(http.ResponseWriter, *http.Request) instead of
-// a http.Handler interface. It saves the boilerplate http.HandlerFunc() cast.
-func SessionHandlerFunc(h http.HandlerFunc, opts *SessionOptions) http.HandlerFunc {
- return SessionHandler(h, opts)
-}
-
-// Create a Session handler to offer the Session behaviour to the specified handler.
-func SessionHandler(h http.Handler, opts *SessionOptions) http.HandlerFunc {
- // Make sure the required cookie fields are set
- if opts.CookieTemplate.Name == "" {
- opts.CookieTemplate.Name = defaultCookieName
- }
- if opts.CookieTemplate.Path == "" {
- opts.CookieTemplate.Path = "/"
- }
- // Secret is required
- if opts.Secret == "" {
- panic(ErrSessionSecretMissing)
- }
-
- // Return the actual handler
- return func(w http.ResponseWriter, r *http.Request) {
- if _, ok := getSessionWriter(w); ok {
- // Self-awareness
- h.ServeHTTP(w, r)
- return
- }
-
- if strings.Index(r.URL.Path, opts.CookieTemplate.Path) != 0 {
- // Session does not apply to this path
- h.ServeHTTP(w, r)
- return
- }
-
- // Create a new Session or retrieve the existing session based on the
- // session cookie received.
- var sess *Session
- var ckSessId string
- exCk, err := r.Cookie(opts.CookieTemplate.Name)
- if err != nil {
- sess = newSession(opts.CookieTemplate.MaxAge)
- ghost.LogFn("ghost.session : error getting session cookie : %s", err)
- } else {
- ckSessId, err = parseSignedCookie(exCk, opts.Secret)
- if err != nil {
- sess = newSession(opts.CookieTemplate.MaxAge)
- ghost.LogFn("ghost.session : error parsing signed cookie : %s", err)
- } else if ckSessId == "" {
- sess = newSession(opts.CookieTemplate.MaxAge)
- ghost.LogFn("ghost.session : no existing session ID")
- } else {
- // Get the session
- sess, err = opts.Store.Get(ckSessId)
- if err != nil {
- sess = newSession(opts.CookieTemplate.MaxAge)
- ghost.LogFn("ghost.session : error getting session from store : %s", err)
- } else if sess == nil {
- sess = newSession(opts.CookieTemplate.MaxAge)
- ghost.LogFn("ghost.session : nil session")
- }
- }
- }
- // Save the original hash of the session, used to compare if the contents
- // have changed during the handling of the request, so that it has to be
- // saved to the stored.
- oriHash := hash(sess)
-
- // Create the augmented ResponseWriter.
- srw := &sessResponseWriter{w, sess, opts.Store, false, func() {
- // This function is called when the header is about to be written, so that
- // the session cookie is correctly set.
-
- // Check if the connection is secure
- proto := strings.Trim(strings.ToLower(r.Header.Get("X-Forwarded-Proto")), " ")
- tls := r.TLS != nil || (strings.HasPrefix(proto, "https") && opts.TrustProxy)
- if opts.CookieTemplate.Secure && !tls {
- ghost.LogFn("ghost.session : secure cookie on a non-secure connection, cookie not sent")
- return
- }
- if !sess.IsNew() {
- // If this is not a new session, no need to send back the cookie
- // TODO : Handle expires?
- return
- }
-
- // Send the session cookie
- ck := opts.CookieTemplate
- ck.Value = sess.ID()
- err := signCookie(&ck, opts.Secret)
- if err != nil {
- ghost.LogFn("ghost.session : error signing cookie : %s", err)
- return
- }
- http.SetCookie(w, &ck)
- }}
-
- // Call wrapped handler
- h.ServeHTTP(srw, r)
-
- // TODO : Expiration management? srw.sess.resetMaxAge()
- // Do not save if content is the same, unless session is new (to avoid
- // creating a new session and sending a cookie on each successive request).
- if newHash := hash(sess); !sess.IsNew() && oriHash == newHash && newHash != 0 {
- // No changes to the session, no need to save
- ghost.LogFn("ghost.session : no changes to save to store")
- return
- }
- err = opts.Store.Set(sess)
- if err != nil {
- ghost.LogFn("ghost.session : error saving session to store : %s", err)
- }
- }
-}
-
-// Helper function to retrieve the session for the current request.
-func GetSession(w http.ResponseWriter) (*Session, bool) {
- ss, ok := getSessionWriter(w)
- if ok {
- return ss.sess, true
- }
- return nil, false
-}
-
-// Helper function to retrieve the session store
-func GetSessionStore(w http.ResponseWriter) (SessionStore, bool) {
- ss, ok := getSessionWriter(w)
- if ok {
- return ss.sessStore, true
- }
- return nil, false
-}
-
-// Internal helper function to retrieve the session writer object.
-func getSessionWriter(w http.ResponseWriter) (*sessResponseWriter, bool) {
- ss, ok := GetResponseWriter(w, func(tst http.ResponseWriter) bool {
- _, ok := tst.(*sessResponseWriter)
- return ok
- })
- if ok {
- return ss.(*sessResponseWriter), true
- }
- return nil, false
-}
-
-// Parse a signed cookie and return the cookie value
-func parseSignedCookie(ck *http.Cookie, secret string) (string, error) {
- var val string
-
- sck := securecookie.New([]byte(secret), nil)
- err := sck.Decode(ck.Name, ck.Value, &val)
- if err != nil {
- return "", err
- }
- return val, nil
-}
-
-// Sign the specified cookie's value
-func signCookie(ck *http.Cookie, secret string) error {
- sck := securecookie.New([]byte(secret), nil)
- enc, err := sck.Encode(ck.Name, ck.Value)
- if err != nil {
- return err
- }
- ck.Value = enc
- return nil
-}
-
-// Compute a CRC32 hash of the session's JSON-encoded contents.
-func hash(s *Session) uint32 {
- data, err := json.Marshal(s)
- if err != nil {
- ghost.LogFn("ghost.session : error hash : %s", err)
- return 0 // 0 is always treated as "modified" session content
- }
- return crc32.ChecksumIEEE(data)
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/session_test.go b/vendor/github.com/PuerkitoBio/ghost/handlers/session_test.go
deleted file mode 100644
index ac91dd2c6..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/session_test.go
+++ /dev/null
@@ -1,258 +0,0 @@
-package handlers
-
-import (
- "fmt"
- "io/ioutil"
- "net/http"
- "net/http/cookiejar"
- "net/http/httptest"
- "testing"
- "time"
-)
-
-var (
- store SessionStore
- secret = "butchered at birth"
-)
-
-func TestSession(t *testing.T) {
- stores := map[string]SessionStore{
- "memory": NewMemoryStore(1),
- "redis": NewRedisStore(&RedisStoreOptions{
- Network: "tcp",
- Address: ":6379",
- Database: 1,
- KeyPrefix: "sess",
- }),
- }
- for k, v := range stores {
- t.Logf("testing session with %s store\n", k)
- store = v
- t.Log("SessionExists")
- testSessionExists(t)
- t.Log("SessionPersists")
- testSessionPersists(t)
- t.Log("SessionExpires")
- testSessionExpires(t)
- t.Log("SessionBeforeExpires")
- testSessionBeforeExpires(t)
- t.Log("PanicIfNoSecret")
- testPanicIfNoSecret(t)
- t.Log("InvalidPath")
- testInvalidPath(t)
- t.Log("ValidSubPath")
- testValidSubPath(t)
- t.Log("SecureOverHttp")
- testSecureOverHttp(t)
- }
-}
-
-func setupTest(f func(w http.ResponseWriter, r *http.Request), ckPath string, secure bool, maxAge int) *httptest.Server {
- opts := NewSessionOptions(store, secret)
- if ckPath != "" {
- opts.CookieTemplate.Path = ckPath
- }
- opts.CookieTemplate.Secure = secure
- opts.CookieTemplate.MaxAge = maxAge
- h := SessionHandler(http.HandlerFunc(f), opts)
- return httptest.NewServer(h)
-}
-
-func doRequest(u string, newJar bool) *http.Response {
- var err error
- if newJar {
- http.DefaultClient.Jar, err = cookiejar.New(new(cookiejar.Options))
- if err != nil {
- panic(err)
- }
- }
- res, err := http.Get(u)
- if err != nil {
- panic(err)
- }
- return res
-}
-
-func testSessionExists(t *testing.T) {
- s := setupTest(func(w http.ResponseWriter, r *http.Request) {
- ssn, ok := GetSession(w)
- if assertTrue(ok, "expected session to be non-nil, got nil", t) {
- ssn.Data["foo"] = "bar"
- assertTrue(ssn.Data["foo"] == "bar", fmt.Sprintf("expected ssn[foo] to be 'bar', got %v", ssn.Data["foo"]), t)
- }
- w.Write([]byte("ok"))
- }, "", false, 0)
- defer s.Close()
-
- res := doRequest(s.URL, true)
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertBody([]byte("ok"), res, t)
- assertTrue(len(res.Cookies()) == 1, fmt.Sprintf("expected response to have 1 cookie, got %d", len(res.Cookies())), t)
-}
-
-func testSessionPersists(t *testing.T) {
- cnt := 0
- s := setupTest(func(w http.ResponseWriter, r *http.Request) {
- ssn, ok := GetSession(w)
- if !ok {
- panic("session not found!")
- }
- if cnt == 0 {
- ssn.Data["foo"] = "bar"
- w.Write([]byte("ok"))
- cnt++
- } else {
- w.Write([]byte(ssn.Data["foo"].(string)))
- }
- }, "", false, 0)
- defer s.Close()
-
- // 1st call, set the session value
- res := doRequest(s.URL, true)
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertBody([]byte("ok"), res, t)
-
- // 2nd call, get the session value
- res = doRequest(s.URL, false)
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertBody([]byte("bar"), res, t)
- assertTrue(len(res.Cookies()) == 0, fmt.Sprintf("expected 2nd response to have 0 cookie, got %d", len(res.Cookies())), t)
-}
-
-func testSessionExpires(t *testing.T) {
- cnt := 0
- s := setupTest(func(w http.ResponseWriter, r *http.Request) {
- ssn, ok := GetSession(w)
- if !ok {
- panic("session not found!")
- }
- if cnt == 0 {
- w.Write([]byte(ssn.ID()))
- cnt++
- } else {
- w.Write([]byte(ssn.ID()))
- }
- }, "", false, 1) // Expire in 1 second
- defer s.Close()
-
- // 1st call, set the session value
- res := doRequest(s.URL, true)
- assertStatus(http.StatusOK, res.StatusCode, t)
- id1, err := ioutil.ReadAll(res.Body)
- if err != nil {
- panic(err)
- }
- res.Body.Close()
- time.Sleep(1001 * time.Millisecond)
-
- // 2nd call, get the session value
- res = doRequest(s.URL, false)
- assertStatus(http.StatusOK, res.StatusCode, t)
- id2, err := ioutil.ReadAll(res.Body)
- if err != nil {
- panic(err)
- }
- res.Body.Close()
- sid1, sid2 := string(id1), string(id2)
- assertTrue(len(res.Cookies()) == 1, fmt.Sprintf("expected 2nd response to have 1 cookie, got %d", len(res.Cookies())), t)
- assertTrue(sid1 != sid2, "expected session IDs to be different, got same", t)
-}
-
-func testSessionBeforeExpires(t *testing.T) {
- s := setupTest(func(w http.ResponseWriter, r *http.Request) {
- ssn, ok := GetSession(w)
- if !ok {
- panic("session not found!")
- }
- w.Write([]byte(ssn.ID()))
- }, "", false, 1) // Expire in 1 second
- defer s.Close()
-
- // 1st call, set the session value
- res := doRequest(s.URL, true)
- assertStatus(http.StatusOK, res.StatusCode, t)
- id1, err := ioutil.ReadAll(res.Body)
- if err != nil {
- panic(err)
- }
- res.Body.Close()
- time.Sleep(500 * time.Millisecond)
-
- // 2nd call, get the session value
- res = doRequest(s.URL, false)
- assertStatus(http.StatusOK, res.StatusCode, t)
- id2, err := ioutil.ReadAll(res.Body)
- if err != nil {
- panic(err)
- }
- res.Body.Close()
- sid1, sid2 := string(id1), string(id2)
- assertTrue(len(res.Cookies()) == 0, fmt.Sprintf("expected 2nd response to have no cookie, got %d", len(res.Cookies())), t)
- assertTrue(sid1 == sid2, "expected session IDs to be the same, got different", t)
-}
-
-func testPanicIfNoSecret(t *testing.T) {
- defer assertPanic(t)
- SessionHandler(http.NotFoundHandler(), NewSessionOptions(nil, ""))
-}
-
-func testInvalidPath(t *testing.T) {
- s := setupTest(func(w http.ResponseWriter, r *http.Request) {
- _, ok := GetSession(w)
- assertTrue(!ok, "expected session to be nil, got non-nil", t)
- w.Write([]byte("ok"))
- }, "/foo", false, 0)
- defer s.Close()
-
- res := doRequest(s.URL, true)
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertBody([]byte("ok"), res, t)
- assertTrue(len(res.Cookies()) == 0, fmt.Sprintf("expected response to have no cookie, got %d", len(res.Cookies())), t)
-}
-
-func testValidSubPath(t *testing.T) {
- s := setupTest(func(w http.ResponseWriter, r *http.Request) {
- _, ok := GetSession(w)
- assertTrue(ok, "expected session to be non-nil, got nil", t)
- w.Write([]byte("ok"))
- }, "/foo", false, 0)
- defer s.Close()
-
- res := doRequest(s.URL+"/foo/bar", true)
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertBody([]byte("ok"), res, t)
- assertTrue(len(res.Cookies()) == 1, fmt.Sprintf("expected response to have 1 cookie, got %d", len(res.Cookies())), t)
-}
-
-func testSecureOverHttp(t *testing.T) {
- s := setupTest(func(w http.ResponseWriter, r *http.Request) {
- _, ok := GetSession(w)
- assertTrue(ok, "expected session to be non-nil, got nil", t)
- w.Write([]byte("ok"))
- }, "", true, 0)
- defer s.Close()
-
- res := doRequest(s.URL, true)
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertBody([]byte("ok"), res, t)
- assertTrue(len(res.Cookies()) == 0, fmt.Sprintf("expected response to have no cookie, got %d", len(res.Cookies())), t)
-}
-
-// TODO : commented, certificate problem
-func xtestSecureOverHttps(t *testing.T) {
- opts := NewSessionOptions(store, secret)
- opts.CookieTemplate.Secure = true
- h := SessionHandler(http.HandlerFunc(
- func(w http.ResponseWriter, r *http.Request) {
- _, ok := GetSession(w)
- assertTrue(ok, "expected session to be non-nil, got nil", t)
- w.Write([]byte("ok"))
- }), opts)
- s := httptest.NewTLSServer(h)
- defer s.Close()
-
- res := doRequest(s.URL, true)
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertBody([]byte("ok"), res, t)
- assertTrue(len(res.Cookies()) == 1, fmt.Sprintf("expected response to have 1 cookie, got %d", len(res.Cookies())), t)
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/sstore.go b/vendor/github.com/PuerkitoBio/ghost/handlers/sstore.go
deleted file mode 100644
index 624993f49..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/sstore.go
+++ /dev/null
@@ -1,90 +0,0 @@
-package handlers
-
-import (
- "sync"
- "time"
-)
-
-// SessionStore interface, must be implemented by any store to be used
-// for session storage.
-type SessionStore interface {
- Get(id string) (*Session, error) // Get the session from the store
- Set(sess *Session) error // Save the session in the store
- Delete(id string) error // Delete the session from the store
- Clear() error // Delete all sessions from the store
- Len() int // Get the number of sessions in the store
-}
-
-// In-memory implementation of a session store. Not recommended for production
-// use.
-type MemoryStore struct {
- l sync.RWMutex
- m map[string]*Session
- capc int
-}
-
-// Create a new memory store.
-func NewMemoryStore(capc int) *MemoryStore {
- m := &MemoryStore{}
- m.capc = capc
- m.newMap()
- return m
-}
-
-// Get the number of sessions saved in the store.
-func (this *MemoryStore) Len() int {
- return len(this.m)
-}
-
-// Get the requested session from the store.
-func (this *MemoryStore) Get(id string) (*Session, error) {
- this.l.RLock()
- defer this.l.RUnlock()
- return this.m[id], nil
-}
-
-// Save the session to the store.
-func (this *MemoryStore) Set(sess *Session) error {
- this.l.Lock()
- defer this.l.Unlock()
- this.m[sess.ID()] = sess
- if sess.IsNew() {
- // Since the memory store doesn't marshal to a string without the isNew, if it is left
- // to true, it will stay true forever.
- sess.isNew = false
- // Expire in the given time. If the maxAge is 0 (which means browser-session lifetime),
- // expire in a reasonable delay, 2 days. The weird case of a negative maxAge will
- // cause the immediate Delete call.
- wait := sess.MaxAge()
- if wait == 0 {
- wait = 2 * 24 * time.Hour
- }
- go func() {
- // Clear the session after the specified delay
- <-time.After(wait)
- this.Delete(sess.ID())
- }()
- }
- return nil
-}
-
-// Delete the specified session ID from the store.
-func (this *MemoryStore) Delete(id string) error {
- this.l.Lock()
- defer this.l.Unlock()
- delete(this.m, id)
- return nil
-}
-
-// Clear all sessions from the store.
-func (this *MemoryStore) Clear() error {
- this.l.Lock()
- defer this.l.Unlock()
- this.newMap()
- return nil
-}
-
-// Re-create the internal map, dropping all existing sessions.
-func (this *MemoryStore) newMap() {
- this.m = make(map[string]*Session, this.capc)
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/static.go b/vendor/github.com/PuerkitoBio/ghost/handlers/static.go
deleted file mode 100644
index 7d070551f..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/static.go
+++ /dev/null
@@ -1,13 +0,0 @@
-package handlers
-
-import (
- "net/http"
-)
-
-// StaticFileHandler, unlike net/http.FileServer, serves the contents of a specific
-// file when it is called.
-func StaticFileHandler(path string) http.HandlerFunc {
- return func(w http.ResponseWriter, r *http.Request) {
- http.ServeFile(w, r, path)
- }
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/static_test.go b/vendor/github.com/PuerkitoBio/ghost/handlers/static_test.go
deleted file mode 100644
index 259079d7a..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/static_test.go
+++ /dev/null
@@ -1,46 +0,0 @@
-package handlers
-
-import (
- "net/http"
- "net/http/httptest"
- "testing"
-)
-
-func TestServeFile(t *testing.T) {
- h := StaticFileHandler("./testdata/styles.css")
- s := httptest.NewServer(h)
- defer s.Close()
-
- res, err := http.Get(s.URL)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertHeader("Content-Type", "text/css; charset=utf-8", res, t)
- assertHeader("Content-Encoding", "", res, t)
- assertBody([]byte(`* {
- background-color: white;
-}`), res, t)
-}
-
-func TestGzippedFile(t *testing.T) {
- h := GZIPHandler(StaticFileHandler("./testdata/styles.css"), nil)
- s := httptest.NewServer(h)
- defer s.Close()
-
- req, err := http.NewRequest("GET", s.URL, nil)
- if err != nil {
- panic(err)
- }
- req.Header.Set("Accept-Encoding", "*")
- res, err := http.DefaultClient.Do(req)
- if err != nil {
- panic(err)
- }
- assertStatus(http.StatusOK, res.StatusCode, t)
- assertHeader("Content-Encoding", "gzip", res, t)
- assertHeader("Content-Type", "text/css; charset=utf-8", res, t)
- assertGzippedBody([]byte(`* {
- background-color: white;
-}`), res, t)
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/testdata/favicon.ico b/vendor/github.com/PuerkitoBio/ghost/handlers/testdata/favicon.ico
deleted file mode 100644
index e277138dc..000000000
Binary files a/vendor/github.com/PuerkitoBio/ghost/handlers/testdata/favicon.ico and /dev/null differ
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/testdata/script.js b/vendor/github.com/PuerkitoBio/ghost/handlers/testdata/script.js
deleted file mode 100644
index 6fcc9b95f..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/testdata/script.js
+++ /dev/null
@@ -1 +0,0 @@
-var a = 0;
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/testdata/styles.css b/vendor/github.com/PuerkitoBio/ghost/handlers/testdata/styles.css
deleted file mode 100644
index 616bdba69..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/testdata/styles.css
+++ /dev/null
@@ -1,3 +0,0 @@
-* {
- background-color: white;
-}
\ No newline at end of file
diff --git a/vendor/github.com/PuerkitoBio/ghost/handlers/utils_test.go b/vendor/github.com/PuerkitoBio/ghost/handlers/utils_test.go
deleted file mode 100644
index d7a79b339..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/handlers/utils_test.go
+++ /dev/null
@@ -1,68 +0,0 @@
-package handlers
-
-import (
- "bytes"
- "compress/gzip"
- "io"
- "io/ioutil"
- "net/http"
- "testing"
-)
-
-func assertTrue(cond bool, msg string, t *testing.T) bool {
- if !cond {
- t.Error(msg)
- return false
- }
- return true
-}
-
-func assertStatus(ex, ac int, t *testing.T) {
- if ex != ac {
- t.Errorf("expected status code to be %d, got %d", ex, ac)
- }
-}
-
-func assertBody(ex []byte, res *http.Response, t *testing.T) {
- buf, err := ioutil.ReadAll(res.Body)
- if err != nil {
- panic(err)
- }
- defer res.Body.Close()
-
- if !bytes.Equal(ex, buf) {
- t.Errorf("expected body to be '%s' (%d), got '%s' (%d)", ex, len(ex), buf, len(buf))
- }
-}
-
-func assertGzippedBody(ex []byte, res *http.Response, t *testing.T) {
- gr, err := gzip.NewReader(res.Body)
- if err != nil {
- panic(err)
- }
- defer res.Body.Close()
-
- buf := bytes.NewBuffer(nil)
- _, err = io.Copy(buf, gr)
- if err != nil {
- panic(err)
- }
- if !bytes.Equal(ex, buf.Bytes()) {
- t.Errorf("expected unzipped body to be '%s' (%d), got '%s' (%d)", ex, len(ex), buf.Bytes(), buf.Len())
- }
-}
-
-func assertHeader(hName, ex string, res *http.Response, t *testing.T) {
- hVal, ok := res.Header[hName]
- if (!ok || len(hVal) == 0) && len(ex) > 0 {
- t.Errorf("expected header %s to be %s, was not set", hName, ex)
- } else if len(hVal) > 0 && hVal[0] != ex {
- t.Errorf("expected header %s to be %s, got %s", hName, ex, hVal)
- }
-}
-
-func assertPanic(t *testing.T) {
- if err := recover(); err == nil {
- t.Error("expected a panic, got none")
- }
-}
diff --git a/vendor/github.com/PuerkitoBio/ghost/tags b/vendor/github.com/PuerkitoBio/ghost/tags
deleted file mode 100644
index 6ea162cab..000000000
--- a/vendor/github.com/PuerkitoBio/ghost/tags
+++ /dev/null
@@ -1,203 +0,0 @@
-!_TAG_FILE_FORMAT 2 /extended format; --format=1 will not append ;" to lines/
-!_TAG_FILE_SORTED 1 /0=unsorted, 1=sorted, 2=foldcase/
-!_TAG_PROGRAM_AUTHOR Darren Hiebert /dhiebert@users.sourceforge.net/
-!_TAG_PROGRAM_NAME Exuberant Ctags //
-!_TAG_PROGRAM_URL http://ctags.sourceforge.net /official site/
-!_TAG_PROGRAM_VERSION 5.8 //
-AmberCompiler templates/amber/amber.go /^type AmberCompiler struct {$/;" t
-BadRequest handlers/basicauth.go /^func BadRequest(w http.ResponseWriter, msg string) {$/;" f
-BasicAuthHandler handlers/basicauth.go /^func BasicAuthHandler(h http.Handler,$/;" f
-BasicAuthHandlerFunc handlers/basicauth.go /^func BasicAuthHandlerFunc(h http.HandlerFunc,$/;" f
-Chain handlers/chain.go /^func (this *chainHandler) Chain(h http.Handler) ChainableHandler {$/;" f
-ChainFunc handlers/chain.go /^func (this *chainHandler) ChainFunc(h http.HandlerFunc) ChainableHandler {$/;" f
-ChainHandlerFuncs handlers/chain.go /^func ChainHandlerFuncs(h ...http.HandlerFunc) ChainableHandler {$/;" f
-ChainHandlers handlers/chain.go /^func ChainHandlers(h ...http.Handler) ChainableHandler {$/;" f
-ChainableHandler handlers/chain.go /^type ChainableHandler interface {$/;" t
-Clear handlers/redisstore.go /^func (this *RedisStore) Clear() error {$/;" f
-Clear handlers/sstore.go /^func (this *MemoryStore) Clear() error {$/;" f
-Compile templates/amber/amber.go /^func (this *AmberCompiler) Compile(f string) (templates.Templater, error) {$/;" f
-Compile templates/gotpl/gotpl.go /^func (this *GoTemplateCompiler) Compile(f string) (templates.Templater, error) {$/;" f
-Compile templates/template.go /^func Compile(path, base string) error {$/;" f
-CompileDir templates/template.go /^func CompileDir(dir string) error {$/;" f
-Context handlers/ghost.go /^func (this *ghostWriter) Context() map[interface{}]interface{} {$/;" f
-ContextHandler handlers/context.go /^func ContextHandler(h http.Handler, cap int) http.HandlerFunc {$/;" f
-ContextHandlerFunc handlers/context.go /^func ContextHandlerFunc(h http.HandlerFunc, cap int) http.HandlerFunc {$/;" f
-Created handlers/session.go /^func (ø *Session) Created() time.Time {$/;" f
-Delete handlers/redisstore.go /^func (this *RedisStore) Delete(id string) error {$/;" f
-Delete handlers/sstore.go /^func (this *MemoryStore) Delete(id string) error {$/;" f
-Execute templates/template.go /^func Execute(tplName string, w io.Writer, data interface{}) error {$/;" f
-FaviconHandler handlers/favicon.go /^func FaviconHandler(h http.Handler, path string, maxAge time.Duration) http.HandlerFunc {$/;" f
-FaviconHandlerFunc handlers/favicon.go /^func FaviconHandlerFunc(h http.HandlerFunc, path string, maxAge time.Duration) http.HandlerFunc {$/;" f
-GZIPHandler handlers/gzip.go /^func GZIPHandler(h http.Handler, filterFn func(http.ResponseWriter, *http.Request) bool) http.HandlerFunc {$/;" f
-GZIPHandlerFunc handlers/gzip.go /^func GZIPHandlerFunc(h http.HandlerFunc, filterFn func(http.ResponseWriter, *http.Request) bool) http.HandlerFunc {$/;" f
-Get handlers/redisstore.go /^func (this *RedisStore) Get(id string) (*Session, error) {$/;" f
-Get handlers/sstore.go /^func (this *MemoryStore) Get(id string) (*Session, error) {$/;" f
-GetContext handlers/context.go /^func GetContext(w http.ResponseWriter) (map[interface{}]interface{}, bool) {$/;" f
-GetPanicError handlers/panic.go /^func GetPanicError(w http.ResponseWriter) (interface{}, bool) {$/;" f
-GetResponseWriter handlers/reswriter.go /^func GetResponseWriter(w http.ResponseWriter,$/;" f
-GetSession handlers/session.go /^func GetSession(w http.ResponseWriter) (*Session, bool) {$/;" f
-GetSessionStore handlers/session.go /^func GetSessionStore(w http.ResponseWriter) (SessionStore, bool) {$/;" f
-GetUser handlers/basicauth.go /^func GetUser(w http.ResponseWriter) (interface{}, bool) {$/;" f
-GetUserName handlers/basicauth.go /^func GetUserName(w http.ResponseWriter) (string, bool) {$/;" f
-GhostHandlerFunc handlers/ghost.go /^func GhostHandlerFunc(h func(w GhostWriter, r *http.Request)) http.HandlerFunc {$/;" f
-GhostWriter handlers/ghost.go /^type GhostWriter interface {$/;" t
-GoTemplateCompiler templates/gotpl/gotpl.go /^type GoTemplateCompiler struct{}$/;" t
-Header handlers/reswriter_test.go /^func (b *baseWriter) Header() http.Header { return nil }$/;" f
-HeaderMatch handlers/header.go /^func HeaderMatch(hdr http.Header, nm string, matchType HeaderMatchType, test string) bool {$/;" f
-HeaderMatchType handlers/header.go /^type HeaderMatchType int$/;" t
-ID handlers/session.go /^func (ø *Session) ID() string {$/;" f
-IsNew handlers/session.go /^func (ø *Session) IsNew() bool {$/;" f
-Len handlers/redisstore.go /^func (this *RedisStore) Len() int {$/;" f
-Len handlers/sstore.go /^func (this *MemoryStore) Len() int {$/;" f
-LogFn app.go /^var LogFn = log.Printf$/;" v
-LogHandler handlers/log.go /^func LogHandler(h http.Handler, opts *LogOptions) http.HandlerFunc {$/;" f
-LogHandlerFunc handlers/log.go /^func LogHandlerFunc(h http.HandlerFunc, opts *LogOptions) http.HandlerFunc {$/;" f
-LogOptions handlers/log.go /^type LogOptions struct {$/;" t
-MarshalJSON handlers/session.go /^func (ø *Session) MarshalJSON() ([]byte, error) {$/;" f
-MaxAge handlers/session.go /^func (ø *Session) MaxAge() time.Duration {$/;" f
-MemoryStore handlers/sstore.go /^type MemoryStore struct {$/;" t
-NewAmberCompiler templates/amber/amber.go /^func NewAmberCompiler(opts amber.Options) *AmberCompiler {$/;" f
-NewChainableHandler handlers/chain.go /^func NewChainableHandler(h http.Handler) ChainableHandler {$/;" f
-NewLogOptions handlers/log.go /^func NewLogOptions(l func(string, ...interface{}), ft string, tok ...string) *LogOptions {$/;" f
-NewMemoryStore handlers/sstore.go /^func NewMemoryStore(capc int) *MemoryStore {$/;" f
-NewRedisStore handlers/redisstore.go /^func NewRedisStore(opts *RedisStoreOptions) *RedisStore {$/;" f
-NewSessionOptions handlers/session.go /^func NewSessionOptions(store SessionStore, secret string) *SessionOptions {$/;" f
-PanicHandler handlers/panic.go /^func PanicHandler(h http.Handler, errH http.Handler) http.HandlerFunc {$/;" f
-PanicHandlerFunc handlers/panic.go /^func PanicHandlerFunc(h http.HandlerFunc, errH http.HandlerFunc) http.HandlerFunc {$/;" f
-RedisStore handlers/redisstore.go /^type RedisStore struct {$/;" t
-RedisStoreOptions handlers/redisstore.go /^type RedisStoreOptions struct {$/;" t
-Register templates/template.go /^func Register(ext string, c TemplateCompiler) {$/;" f
-Render templates/template.go /^func Render(tplName string, w http.ResponseWriter, data interface{}) (err error) {$/;" f
-Session handlers/ghost.go /^func (this *ghostWriter) Session() *Session {$/;" f
-Session handlers/session.go /^type Session struct {$/;" t
-SessionHandler handlers/session.go /^func SessionHandler(h http.Handler, opts *SessionOptions) http.HandlerFunc {$/;" f
-SessionHandlerFunc handlers/session.go /^func SessionHandlerFunc(h http.HandlerFunc, opts *SessionOptions) http.HandlerFunc {$/;" f
-SessionOptions handlers/session.go /^type SessionOptions struct {$/;" t
-SessionStore handlers/sstore.go /^type SessionStore interface {$/;" t
-Set handlers/redisstore.go /^func (this *RedisStore) Set(sess *Session) error {$/;" f
-Set handlers/sstore.go /^func (this *MemoryStore) Set(sess *Session) error {$/;" f
-StaticFileHandler handlers/static.go /^func StaticFileHandler(path string) http.HandlerFunc {$/;" f
-TemplateCompiler templates/template.go /^type TemplateCompiler interface {$/;" t
-Templater templates/template.go /^type Templater interface {$/;" t
-TestBaseWriter handlers/reswriter_test.go /^func TestBaseWriter(t *testing.T) {$/;" f
-TestChaining handlers/chain_test.go /^func TestChaining(t *testing.T) {$/;" f
-TestChainingMixed handlers/chain_test.go /^func TestChainingMixed(t *testing.T) {$/;" f
-TestChainingWithHelperFunc handlers/chain_test.go /^func TestChainingWithHelperFunc(t *testing.T) {$/;" f
-TestContext handlers/context_test.go /^func TestContext(t *testing.T) {$/;" f
-TestCustom handlers/log_test.go /^func TestCustom(t *testing.T) {$/;" f
-TestFavicon handlers/favicon_test.go /^func TestFavicon(t *testing.T) {$/;" f
-TestFaviconFromCache handlers/favicon_test.go /^func TestFaviconFromCache(t *testing.T) {$/;" f
-TestFaviconInvalidPath handlers/favicon_test.go /^func TestFaviconInvalidPath(t *testing.T) {$/;" f
-TestGzipOnCustomFilter handlers/gzip_test.go /^func TestGzipOnCustomFilter(t *testing.T) {$/;" f
-TestGzipOuterPanic handlers/gzip_test.go /^func TestGzipOuterPanic(t *testing.T) {$/;" f
-TestGzipped handlers/gzip_test.go /^func TestGzipped(t *testing.T) {$/;" f
-TestGzippedAuth handlers/basicauth_test.go /^func TestGzippedAuth(t *testing.T) {$/;" f
-TestGzippedFile handlers/static_test.go /^func TestGzippedFile(t *testing.T) {$/;" f
-TestImmediate handlers/log_test.go /^func TestImmediate(t *testing.T) {$/;" f
-TestLog handlers/log_test.go /^func TestLog(t *testing.T) {$/;" f
-TestNilWriter handlers/reswriter_test.go /^func TestNilWriter(t *testing.T) {$/;" f
-TestNoGzip handlers/gzip_test.go /^func TestNoGzip(t *testing.T) {$/;" f
-TestNoGzipOnCustomFilter handlers/gzip_test.go /^func TestNoGzipOnCustomFilter(t *testing.T) {$/;" f
-TestNoGzipOnFilter handlers/gzip_test.go /^func TestNoGzipOnFilter(t *testing.T) {$/;" f
-TestNoPanic handlers/panic_test.go /^func TestNoPanic(t *testing.T) {$/;" f
-TestPanic handlers/panic_test.go /^func TestPanic(t *testing.T) {$/;" f
-TestPanicCustom handlers/panic_test.go /^func TestPanicCustom(t *testing.T) {$/;" f
-TestServeFile handlers/static_test.go /^func TestServeFile(t *testing.T) {$/;" f
-TestSession handlers/session_test.go /^func TestSession(t *testing.T) {$/;" f
-TestUnauth handlers/basicauth_test.go /^func TestUnauth(t *testing.T) {$/;" f
-TestWrappedContext handlers/context_test.go /^func TestWrappedContext(t *testing.T) {$/;" f
-TestWrappedNotFoundWriter handlers/reswriter_test.go /^func TestWrappedNotFoundWriter(t *testing.T) {$/;" f
-TestWrappedWriter handlers/reswriter_test.go /^func TestWrappedWriter(t *testing.T) {$/;" f
-Unauthorized handlers/basicauth.go /^func Unauthorized(w http.ResponseWriter, realm string) {$/;" f
-UnmarshalJSON handlers/session.go /^func (ø *Session) UnmarshalJSON(b []byte) error {$/;" f
-User handlers/ghost.go /^func (this *ghostWriter) User() interface{} {$/;" f
-UserName handlers/ghost.go /^func (this *ghostWriter) UserName() string {$/;" f
-WrapWriter handlers/reswriter.go /^type WrapWriter interface {$/;" t
-WrappedWriter handlers/basicauth.go /^func (this *userResponseWriter) WrappedWriter() http.ResponseWriter {$/;" f
-WrappedWriter handlers/context.go /^func (this *contextResponseWriter) WrappedWriter() http.ResponseWriter {$/;" f
-WrappedWriter handlers/gzip.go /^func (w *gzipResponseWriter) WrappedWriter() http.ResponseWriter {$/;" f
-WrappedWriter handlers/log.go /^func (this *statusResponseWriter) WrappedWriter() http.ResponseWriter {$/;" f
-WrappedWriter handlers/panic.go /^func (this *errResponseWriter) WrappedWriter() http.ResponseWriter {$/;" f
-WrappedWriter handlers/session.go /^func (ø *sessResponseWriter) WrappedWriter() http.ResponseWriter {$/;" f
-Write handlers/gzip.go /^func (w *gzipResponseWriter) Write(b []byte) (int, error) {$/;" f
-Write handlers/log.go /^func (this *statusResponseWriter) Write(data []byte) (int, error) {$/;" f
-Write handlers/reswriter_test.go /^func (b *baseWriter) Write(data []byte) (int, error) { return 0, nil }$/;" f
-Write handlers/session.go /^func (ø *sessResponseWriter) Write(data []byte) (int, error) {$/;" f
-WriteHeader handlers/gzip.go /^func (w *gzipResponseWriter) WriteHeader(code int) {$/;" f
-WriteHeader handlers/log.go /^func (this *statusResponseWriter) WriteHeader(code int) {$/;" f
-WriteHeader handlers/reswriter_test.go /^func (b *baseWriter) WriteHeader(code int) {}$/;" f
-WriteHeader handlers/session.go /^func (ø *sessResponseWriter) WriteHeader(code int) {$/;" f
-a handlers/testdata/script.js /^var a = 0;$/;" v
-acceptsGzip handlers/gzip.go /^func acceptsGzip(hdr http.Header) bool {$/;" f
-applyFilter handlers/gzip.go /^func (w *gzipResponseWriter) applyFilter() {$/;" f
-assertBody handlers/utils_test.go /^func assertBody(ex []byte, res *http.Response, t *testing.T) {$/;" f
-assertGzippedBody handlers/utils_test.go /^func assertGzippedBody(ex []byte, res *http.Response, t *testing.T) {$/;" f
-assertHeader handlers/utils_test.go /^func assertHeader(hName, ex string, res *http.Response, t *testing.T) {$/;" f
-assertPanic handlers/utils_test.go /^func assertPanic(t *testing.T) {$/;" f
-assertStatus handlers/utils_test.go /^func assertStatus(ex, ac int, t *testing.T) {$/;" f
-assertTrue handlers/utils_test.go /^func assertTrue(cond bool, msg string, t *testing.T) bool {$/;" f
-authenticate ghostest/main.go /^func authenticate(u, p string) (interface{}, bool) {$/;" f
-baseWriter handlers/reswriter_test.go /^type baseWriter struct{}$/;" t
-buf handlers/chain_test.go /^ var buf bytes.Buffer$/;" v
-buf handlers/favicon.go /^ var buf []byte$/;" v
-chainHandler handlers/chain.go /^type chainHandler struct {$/;" t
-ckSessId handlers/session.go /^ var ckSessId string$/;" v
-compileTemplate templates/template.go /^func compileTemplate(p, base string) error {$/;" f
-contextResponseWriter handlers/context.go /^type contextResponseWriter struct {$/;" t
-defaultFilter handlers/gzip.go /^func defaultFilter(w http.ResponseWriter, r *http.Request) bool {$/;" f
-doRequest handlers/session_test.go /^func doRequest(u string, newJar bool) *http.Response {$/;" f
-err handlers/favicon.go /^ var err error$/;" v
-err handlers/redisstore.go /^ var err error$/;" v
-err handlers/session_test.go /^ var err error$/;" v
-errResponseWriter handlers/panic.go /^type errResponseWriter struct {$/;" t
-fn.init ghostest/public/jquery-2.0.0.min.js /^(function(e,undefined){var t,n,r=typeof undefined,i=e.location,o=e.document,s=o.documentElement,a=e.jQuery,u=e.$,l={},c=[],f="2.0.0",p=c.concat,h=c.push,d=c.slice,g=c.indexOf,m=l.toString,y=l.hasOwnProperty,v=f.trim,x=function(e,n){return new x.fn.init(e,n,t)},b=\/[+-]?(?:\\d*\\.|)\\d+(?:[eE][+-]?\\d+|)\/.source,w=\/\\S+\/g,T=\/^(?:(<[\\w\\W]+>)[^>]*|#([\\w-]*))$\/,C=\/^<(\\w+)\\s*\\\/?>(?:<\\\/\\1>|)$\/,k=\/^-ms-\/,N=\/-([\\da-z])\/gi,E=function(e,t){return t.toUpperCase()},S=function(){o.removeEventListener("DOMContentLoaded",S,!1),e.removeEventListener("load",S,!1),x.ready()};x.fn=x.prototype={jquery:f,constructor:x,init:function(e,t,n){var r,i;if(!e)return this;if("string"==typeof e){if(r="<"===e.charAt(0)&&">"===e.charAt(e.length-1)&&e.length>=3?[null,e,null]:T.exec(e),!r||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof x?t[0]:t,x.merge(this,x.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:o,!0)),C.test(r[1])&&x.isPlainObject(t))for(r in t)x.isFunction(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return i=o.getElementById(r[2]),i&&i.parentNode&&(this.length=1,this[0]=i),this.context=o,this.selector=e,this}return e.nodeType?(this.context=this[0]=e,this.length=1,this):x.isFunction(e)?n.ready(e):(e.selector!==undefined&&(this.selector=e.selector,this.context=e.context),x.makeArray(e,this))},selector:"",length:0,toArray:function(){return d.call(this)},get:function(e){return null==e?this.toArray():0>e?this[this.length+e]:this[e]},pushStack:function(e){var t=x.merge(this.constructor(),e);return t.prevObject=this,t.context=this.context,t},each:function(e,t){return x.each(this,e,t)},ready:function(e){return x.ready.promise().done(e),this},slice:function(){return this.pushStack(d.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(e){var t=this.length,n=+e+(0>e?t:0);return this.pushStack(n>=0&&t>n?[this[n]]:[])},map:function(e){return this.pushStack(x.map(this,function(t,n){return e.call(t,n,t)}))},end:function(){return this.prevObject||this.constructor(null)},push:h,sort:[].sort,splice:[].splice},x.fn.init.prototype=x.fn,x.extend=x.fn.extend=function(){var e,t,n,r,i,o,s=arguments[0]||{},a=1,u=arguments.length,l=!1;for("boolean"==typeof s&&(l=s,s=arguments[1]||{},a=2),"object"==typeof s||x.isFunction(s)||(s={}),u===a&&(s=this,--a);u>a;a++)if(null!=(e=arguments[a]))for(t in e)n=s[t],r=e[t],s!==r&&(l&&r&&(x.isPlainObject(r)||(i=x.isArray(r)))?(i?(i=!1,o=n&&x.isArray(n)?n:[]):o=n&&x.isPlainObject(n)?n:{},s[t]=x.extend(l,o,r)):r!==undefined&&(s[t]=r));return s},x.extend({expando:"jQuery"+(f+Math.random()).replace(\/\\D\/g,""),noConflict:function(t){return e.$===x&&(e.$=u),t&&e.jQuery===x&&(e.jQuery=a),x},isReady:!1,readyWait:1,holdReady:function(e){e?x.readyWait++:x.ready(!0)},ready:function(e){(e===!0?--x.readyWait:x.isReady)||(x.isReady=!0,e!==!0&&--x.readyWait>0||(n.resolveWith(o,[x]),x.fn.trigger&&x(o).trigger("ready").off("ready")))},isFunction:function(e){return"function"===x.type(e)},isArray:Array.isArray,isWindow:function(e){return null!=e&&e===e.window},isNumeric:function(e){return!isNaN(parseFloat(e))&&isFinite(e)},type:function(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?l[m.call(e)]||"object":typeof 
e},isPlainObject:function(e){if("object"!==x.type(e)||e.nodeType||x.isWindow(e))return!1;try{if(e.constructor&&!y.call(e.constructor.prototype,"isPrototypeOf"))return!1}catch(t){return!1}return!0},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},error:function(e){throw Error(e)},parseHTML:function(e,t,n){if(!e||"string"!=typeof e)return null;"boolean"==typeof t&&(n=t,t=!1),t=t||o;var r=C.exec(e),i=!n&&[];return r?[t.createElement(r[1])]:(r=x.buildFragment([e],t,i),i&&x(i).remove(),x.merge([],r.childNodes))},parseJSON:JSON.parse,parseXML:function(e){var t,n;if(!e||"string"!=typeof e)return null;try{n=new DOMParser,t=n.parseFromString(e,"text\/xml")}catch(r){t=undefined}return(!t||t.getElementsByTagName("parsererror").length)&&x.error("Invalid XML: "+e),t},noop:function(){},globalEval:function(e){var t,n=eval;e=x.trim(e),e&&(1===e.indexOf("use strict")?(t=o.createElement("script"),t.text=e,o.head.appendChild(t).parentNode.removeChild(t)):n(e))},camelCase:function(e){return e.replace(k,"ms-").replace(N,E)},nodeName:function(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()},each:function(e,t,n){var r,i=0,o=e.length,s=j(e);if(n){if(s){for(;o>i;i++)if(r=t.apply(e[i],n),r===!1)break}else for(i in e)if(r=t.apply(e[i],n),r===!1)break}else if(s){for(;o>i;i++)if(r=t.call(e[i],i,e[i]),r===!1)break}else for(i in e)if(r=t.call(e[i],i,e[i]),r===!1)break;return e},trim:function(e){return null==e?"":v.call(e)},makeArray:function(e,t){var n=t||[];return null!=e&&(j(Object(e))?x.merge(n,"string"==typeof e?[e]:e):h.call(n,e)),n},inArray:function(e,t,n){return null==t?-1:g.call(t,e,n)},merge:function(e,t){var n=t.length,r=e.length,i=0;if("number"==typeof n)for(;n>i;i++)e[r++]=t[i];else while(t[i]!==undefined)e[r++]=t[i++];return e.length=r,e},grep:function(e,t,n){var r,i=[],o=0,s=e.length;for(n=!!n;s>o;o++)r=!!t(e[o],o),n!==r&&i.push(e[o]);return i},map:function(e,t,n){var r,i=0,o=e.length,s=j(e),a=[];if(s)for(;o>i;i++)r=t(e[i],i,n),null!=r&&(a[a.length]=r);else for(i in e)r=t(e[i],i,n),null!=r&&(a[a.length]=r);return p.apply([],a)},guid:1,proxy:function(e,t){var n,r,i;return"string"==typeof t&&(n=e[t],t=e,e=n),x.isFunction(e)?(r=d.call(arguments,2),i=function(){return e.apply(t||this,r.concat(d.call(arguments)))},i.guid=e.guid=e.guid||x.guid++,i):undefined},access:function(e,t,n,r,i,o,s){var a=0,u=e.length,l=null==n;if("object"===x.type(n)){i=!0;for(a in n)x.access(e,t,a,n[a],!0,o,s)}else if(r!==undefined&&(i=!0,x.isFunction(r)||(s=!0),l&&(s?(t.call(e,r),t=null):(l=t,t=function(e,t,n){return l.call(x(e),n)})),t))for(;u>a;a++)t(e[a],n,s?r:r.call(e[a],a,t(e[a],n)));return i?e:l?t.call(e):u?t(e[0],n):o},now:Date.now,swap:function(e,t,n,r){var i,o,s={};for(o in t)s[o]=e.style[o],e.style[o]=t[o];i=n.apply(e,r||[]);for(o in t)e.style[o]=s[o];return i}}),x.ready.promise=function(t){return n||(n=x.Deferred(),"complete"===o.readyState?setTimeout(x.ready):(o.addEventListener("DOMContentLoaded",S,!1),e.addEventListener("load",S,!1))),n.promise(t)},x.each("Boolean Number String Function Array Date RegExp Object Error".split(" "),function(e,t){l["[object "+t+"]"]=t.toLowerCase()});function j(e){var t=e.length,n=x.type(e);return x.isWindow(e)?!1:1===e.nodeType&&t?!0:"array"===n||"function"!==n&&(0===t||"number"==typeof t&&t>0&&t-1 in e)}t=x(o),function(e,undefined){var t,n,r,i,o,s,a,u,l,c,f,p,h,d,g,m,y="sizzle"+-new Date,v=e.document,b={},w=0,T=0,C=ot(),k=ot(),N=ot(),E=!1,S=function(){return 0},j=typeof undefined,D=1<<31,A=[],L=A.pop,q=A.push,H=A.push,O=A.slice,F=A.indexOf||function(e){var 
t=0,n=this.length;for(;n>t;t++)if(this[t]===e)return t;return-1},P="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",R="[\\\\x20\\\\t\\\\r\\\\n\\\\f]",M="(?:\\\\\\\\.|[\\\\w-]|[^\\\\x00-\\\\xa0])+",W=M.replace("w","w#"),$="\\\\["+R+"*("+M+")"+R+"*(?:([*^$|!~]?=)"+R+"*(?:(['\\"])((?:\\\\\\\\.|[^\\\\\\\\])*?)\\\\3|("+W+")|)|)"+R+"*\\\\]",B=":("+M+")(?:\\\\(((['\\"])((?:\\\\\\\\.|[^\\\\\\\\])*?)\\\\3|((?:\\\\\\\\.|[^\\\\\\\\()[\\\\]]|"+$.replace(3,8)+")*)|.*)\\\\)|)",I=RegExp("^"+R+"+|((?:^|[^\\\\\\\\])(?:\\\\\\\\.)*)"+R+"+$","g"),z=RegExp("^"+R+"*,"+R+"*"),_=RegExp("^"+R+"*([>+~]|"+R+")"+R+"*"),X=RegExp(R+"*[+~]"),U=RegExp("="+R+"*([^\\\\]'\\"]*)"+R+"*\\\\]","g"),Y=RegExp(B),V=RegExp("^"+W+"$"),G={ID:RegExp("^#("+M+")"),CLASS:RegExp("^\\\\.("+M+")"),TAG:RegExp("^("+M.replace("w","w*")+")"),ATTR:RegExp("^"+$),PSEUDO:RegExp("^"+B),CHILD:RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\\\("+R+"*(even|odd|(([+-]|)(\\\\d*)n|)"+R+"*(?:([+-]|)"+R+"*(\\\\d+)|))"+R+"*\\\\)|)","i"),"boolean":RegExp("^(?:"+P+")$","i"),needsContext:RegExp("^"+R+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\\\("+R+"*((?:-\\\\d)?\\\\d*)"+R+"*\\\\)|)(?=[^-]|$)","i")},J=\/^[^{]+\\{\\s*\\[native \\w\/,Q=\/^(?:#([\\w-]+)|(\\w+)|\\.([\\w-]+))$\/,K=\/^(?:input|select|textarea|button)$\/i,Z=\/^h\\d$\/i,et=\/'|\\\\\/g,tt=\/\\\\([\\da-fA-F]{1,6}[\\x20\\t\\r\\n\\f]?|.)\/g,nt=function(e,t){var n="0x"+t-65536;return n!==n?t:0>n?String.fromCharCode(n+65536):String.fromCharCode(55296|n>>10,56320|1023&n)};try{H.apply(A=O.call(v.childNodes),v.childNodes),A[v.childNodes.length].nodeType}catch(rt){H={apply:A.length?function(e,t){q.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function it(e){return J.test(e+"")}function ot(){var e,t=[];return e=function(n,i){return t.push(n+=" ")>r.cacheLength&&delete e[t.shift()],e[n]=i}}function st(e){return e[y]=!0,e}function at(e){var t=c.createElement("div");try{return!!e(t)}catch(n){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function ut(e,t,n,r){var i,o,s,a,u,f,d,g,x,w;if((t?t.ownerDocument||t:v)!==c&&l(t),t=t||c,n=n||[],!e||"string"!=typeof e)return n;if(1!==(a=t.nodeType)&&9!==a)return[];if(p&&!r){if(i=Q.exec(e))if(s=i[1]){if(9===a){if(o=t.getElementById(s),!o||!o.parentNode)return n;if(o.id===s)return n.push(o),n}else if(t.ownerDocument&&(o=t.ownerDocument.getElementById(s))&&m(t,o)&&o.id===s)return n.push(o),n}else{if(i[2])return H.apply(n,t.getElementsByTagName(e)),n;if((s=i[3])&&b.getElementsByClassName&&t.getElementsByClassName)return H.apply(n,t.getElementsByClassName(s)),n}if(b.qsa&&(!h||!h.test(e))){if(g=d=y,x=t,w=9===a&&e,1===a&&"object"!==t.nodeName.toLowerCase()){f=gt(e),(d=t.getAttribute("id"))?g=d.replace(et,"\\\\$&"):t.setAttribute("id",g),g="[id='"+g+"'] ",u=f.length;while(u--)f[u]=g+mt(f[u]);x=X.test(e)&&t.parentNode||t,w=f.join(",")}if(w)try{return H.apply(n,x.querySelectorAll(w)),n}catch(T){}finally{d||t.removeAttribute("id")}}}return kt(e.replace(I,"$1"),t,n,r)}o=ut.isXML=function(e){var t=e&&(e.ownerDocument||e).documentElement;return t?"HTML"!==t.nodeName:!1},l=ut.setDocument=function(e){var t=e?e.ownerDocument||e:v;return t!==c&&9===t.nodeType&&t.documentElement?(c=t,f=t.documentElement,p=!o(t),b.getElementsByTagName=at(function(e){return e.appendChild(t.createComment("")),!e.getElementsByTagName("*").length}),b.attributes=at(function(e){return 
e.className="i",!e.getAttribute("className")}),b.getElementsByClassName=at(function(e){return e.innerHTML="