author     Frédéric Guillot <fred@miniflux.net>  2017-12-16 11:25:18 -0800
committer  Frédéric Guillot <fred@miniflux.net>  2017-12-16 11:25:18 -0800
commit     27196589fbd36f5f840e51b59bd6253d0f865db3 (patch)
tree       4896268c735528045e4064ace92a2a75280585b8 /vendor/github.com
parent     231ebf2daa9c024fbe99277f57935444946824bd (diff)
Add FeedIcon API call and update dependencies
Diffstat (limited to 'vendor/github.com')
-rw-r--r--  vendor/github.com/miniflux/miniflux-go/client.go      17
-rw-r--r--  vendor/github.com/miniflux/miniflux-go/miniflux.go     7
-rw-r--r--  vendor/github.com/tdewolff/minify/README.md            89
-rw-r--r--  vendor/github.com/tdewolff/minify/html/html_test.go    5
-rw-r--r--  vendor/github.com/tdewolff/minify/js/js_test.go        3
-rw-r--r--  vendor/github.com/tdewolff/minify/xml/xml.go           3
-rw-r--r--  vendor/github.com/tdewolff/minify/xml/xml_test.go      5
-rw-r--r--  vendor/github.com/tdewolff/parse/buffer/lexer.go       7
-rw-r--r--  vendor/github.com/tdewolff/parse/css/parse_test.go     3
-rw-r--r--  vendor/github.com/tdewolff/parse/error.go              32
-rw-r--r--  vendor/github.com/tdewolff/parse/html/lex.go           97
-rw-r--r--  vendor/github.com/tdewolff/parse/html/lex_test.go      27
-rw-r--r--  vendor/github.com/tdewolff/parse/js/lex.go             2
-rw-r--r--  vendor/github.com/tdewolff/parse/json/parse.go         6
-rw-r--r--  vendor/github.com/tdewolff/parse/json/parse_test.go    3
-rw-r--r--  vendor/github.com/tdewolff/parse/xml/lex.go            17
-rw-r--r--  vendor/github.com/tdewolff/parse/xml/lex_test.go       4
-rw-r--r--  vendor/github.com/tomasen/realip/.travis.yml           2
-rw-r--r--  vendor/github.com/tomasen/realip/README.md             25
-rw-r--r--  vendor/github.com/tomasen/realip/realip.go             100
-rw-r--r--  vendor/github.com/tomasen/realip/realip_test.go        75
21 files changed, 335 insertions, 194 deletions
diff --git a/vendor/github.com/miniflux/miniflux-go/client.go b/vendor/github.com/miniflux/miniflux-go/client.go
index b43b8a9..905ec53 100644
--- a/vendor/github.com/miniflux/miniflux-go/client.go
+++ b/vendor/github.com/miniflux/miniflux-go/client.go
@@ -274,6 +274,23 @@ func (c *Client) DeleteFeed(feedID int64) error {
return nil
}
+// FeedIcon gets a feed icon.
+func (c *Client) FeedIcon(feedID int64) (*FeedIcon, error) {
+ body, err := c.request.Get(fmt.Sprintf("/v1/feeds/%d/icon", feedID))
+ if err != nil {
+ return nil, err
+ }
+ defer body.Close()
+
+ var feedIcon *FeedIcon
+ decoder := json.NewDecoder(body)
+ if err := decoder.Decode(&feedIcon); err != nil {
+ return nil, fmt.Errorf("miniflux: response error (%v)", err)
+ }
+
+ return feedIcon, nil
+}
+
// Entry gets a single feed entry.
func (c *Client) Entry(feedID, entryID int64) (*Entry, error) {
body, err := c.request.Get(fmt.Sprintf("/v1/feeds/%d/entries/%d", feedID, entryID))
diff --git a/vendor/github.com/miniflux/miniflux-go/miniflux.go b/vendor/github.com/miniflux/miniflux-go/miniflux.go
index 452f2a2..aefabc8 100644
--- a/vendor/github.com/miniflux/miniflux-go/miniflux.go
+++ b/vendor/github.com/miniflux/miniflux-go/miniflux.go
@@ -79,6 +79,13 @@ type Feed struct {
Entries Entries `json:"entries,omitempty"`
}
+// FeedIcon represents the feed icon.
+type FeedIcon struct {
+ ID int64 `json:"id"`
+ MimeType string `json:"mime_type"`
+ Data string `json:"data"`
+}
+
// Feeds represents a list of feeds.
type Feeds []*Feed
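Taken together, the new client method and the FeedIcon type above could be consumed as in the following sketch. This is a hedged example, not part of the diff: the `miniflux.NewClient` constructor, endpoint, and credentials are assumptions about the surrounding miniflux-go API.

```go
package main

import (
	"fmt"
	"log"

	"github.com/miniflux/miniflux-go"
)

func main() {
	// Assumed constructor from the miniflux-go package; the endpoint and
	// credentials are placeholders.
	client := miniflux.NewClient("https://miniflux.example.org", "admin", "secret")

	// Fetch the icon of feed 42 through the API call added in this commit.
	icon, err := client.FeedIcon(42)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(icon.ID, icon.MimeType, len(icon.Data))
}
```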
diff --git a/vendor/github.com/tdewolff/minify/README.md b/vendor/github.com/tdewolff/minify/README.md
index 101bee8..588b3d6 100644
--- a/vendor/github.com/tdewolff/minify/README.md
+++ b/vendor/github.com/tdewolff/minify/README.md
@@ -8,7 +8,7 @@
---
-Minify is a minifier package written in [Go][1]. It provides HTML5, CSS3, JS, JSON, SVG and XML minifiers and an interface to implement any other minifier. Minification is the process of removing bytes from a file (such as whitespace) without changing its output and therefore shrinking its size and speeding up transmission over the internet and possibly parsing. The implemented minifiers are high performance and streaming, which implies O(n).
+Minify is a minifier package written in [Go][1]. It provides HTML5, CSS3, JS, JSON, SVG and XML minifiers and an interface to implement any other minifier. Minification is the process of removing bytes from a file (such as whitespace) without changing its output and therefore shrinking its size and speeding up transmission over the internet and possibly parsing. The implemented minifiers are designed for high performance.
The core functionality associates mimetypes with minification functions, allowing embedded resources (like CSS or JS within HTML files) to be minified as well. Users can add new implementations that are triggered based on a mimetype (or pattern), or redirect to an external command (like ClosureCompiler, UglifyCSS, ...).
@@ -100,51 +100,52 @@ The benchmarks directory contains a number of standardized samples used to compa
```
name time/op
-CSS/sample_bootstrap.css-4 3.05ms ± 1%
-CSS/sample_gumby.css-4 4.25ms ± 1%
-HTML/sample_amazon.html-4 3.33ms ± 0%
-HTML/sample_bbc.html-4 1.39ms ± 7%
-HTML/sample_blogpost.html-4 222µs ± 1%
-HTML/sample_es6.html-4 18.0ms ± 1%
-HTML/sample_stackoverflow.html-4 3.08ms ± 1%
-HTML/sample_wikipedia.html-4 6.06ms ± 1%
-JS/sample_ace.js-4 9.92ms ± 1%
-JS/sample_dot.js-4 91.4µs ± 4%
-JS/sample_jquery.js-4 4.00ms ± 1%
-JS/sample_jqueryui.js-4 7.93ms ± 0%
-JS/sample_moment.js-4 1.46ms ± 1%
-JSON/sample_large.json-4 5.07ms ± 4%
-JSON/sample_testsuite.json-4 2.96ms ± 0%
-JSON/sample_twitter.json-4 11.3µs ± 0%
-SVG/sample_arctic.svg-4 64.7ms ± 0%
-SVG/sample_gopher.svg-4 227µs ± 0%
-SVG/sample_usa.svg-4 35.9ms ± 6%
-XML/sample_books.xml-4 48.1µs ± 4%
-XML/sample_catalog.xml-4 20.2µs ± 0%
-XML/sample_omg.xml-4 9.02ms ± 0%
+CSS/sample_bootstrap.css-4 2.26ms ± 0%
+CSS/sample_gumby.css-4 2.92ms ± 1%
+HTML/sample_amazon.html-4 2.33ms ± 2%
+HTML/sample_bbc.html-4 1.02ms ± 1%
+HTML/sample_blogpost.html-4 171µs ± 2%
+HTML/sample_es6.html-4 14.5ms ± 0%
+HTML/sample_stackoverflow.html-4 2.41ms ± 1%
+HTML/sample_wikipedia.html-4 4.76ms ± 0%
+JS/sample_ace.js-4 7.41ms ± 0%
+JS/sample_dot.js-4 63.7µs ± 0%
+JS/sample_jquery.js-4 2.99ms ± 0%
+JS/sample_jqueryui.js-4 5.92ms ± 2%
+JS/sample_moment.js-4 1.09ms ± 1%
+JSON/sample_large.json-4 2.95ms ± 0%
+JSON/sample_testsuite.json-4 1.51ms ± 1%
+JSON/sample_twitter.json-4 6.75µs ± 1%
+SVG/sample_arctic.svg-4 62.3ms ± 1%
+SVG/sample_gopher.svg-4 218µs ± 0%
+SVG/sample_usa.svg-4 33.1ms ± 3%
+XML/sample_books.xml-4 36.2µs ± 0%
+XML/sample_catalog.xml-4 14.9µs ± 0%
+XML/sample_omg.xml-4 6.31ms ± 1%
name speed
-CSS/sample_bootstrap.css-4 45.0MB/s ± 1%
-CSS/sample_gumby.css-4 43.8MB/s ± 1%
-HTML/sample_amazon.html-4 142MB/s ± 0%
-HTML/sample_bbc.html-4 83.0MB/s ± 7%
-HTML/sample_blogpost.html-4 94.5MB/s ± 1%
-HTML/sample_es6.html-4 56.8MB/s ± 1%
-HTML/sample_stackoverflow.html-4 66.7MB/s ± 1%
-HTML/sample_wikipedia.html-4 73.5MB/s ± 1%
-JS/sample_ace.js-4 64.9MB/s ± 1%
-JS/sample_dot.js-4 56.4MB/s ± 4%
-JS/sample_jquery.js-4 61.8MB/s ± 1%
-JS/sample_jqueryui.js-4 59.2MB/s ± 0%
-JS/sample_moment.js-4 67.8MB/s ± 1%
-JSON/sample_large.json-4 150MB/s ± 4%
-JSON/sample_testsuite.json-4 233MB/s ± 0%
-JSON/sample_twitter.json-4 134MB/s ± 0%
-SVG/sample_arctic.svg-4 22.7MB/s ± 0%
-SVG/sample_gopher.svg-4 25.6MB/s ± 0%
-SVG/sample_usa.svg-4 28.6MB/s ± 6%
-XML/sample_books.xml-4 92.1MB/s ± 4%
-XML/sample_catalog.xml-4 95.6MB/s ± 0%
+CSS/sample_bootstrap.css-4 60.8MB/s ± 0%
+CSS/sample_gumby.css-4 63.9MB/s ± 1%
+HTML/sample_amazon.html-4 203MB/s ± 2%
+HTML/sample_bbc.html-4 113MB/s ± 1%
+HTML/sample_blogpost.html-4 123MB/s ± 2%
+HTML/sample_es6.html-4 70.7MB/s ± 0%
+HTML/sample_stackoverflow.html-4 85.2MB/s ± 1%
+HTML/sample_wikipedia.html-4 93.6MB/s ± 0%
+JS/sample_ace.js-4 86.9MB/s ± 0%
+JS/sample_dot.js-4 81.0MB/s ± 0%
+JS/sample_jquery.js-4 82.8MB/s ± 0%
+JS/sample_jqueryui.js-4 79.3MB/s ± 2%
+JS/sample_moment.js-4 91.2MB/s ± 1%
+JSON/sample_large.json-4 258MB/s ± 0%
+JSON/sample_testsuite.json-4 457MB/s ± 1%
+JSON/sample_twitter.json-4 226MB/s ± 1%
+SVG/sample_arctic.svg-4 23.6MB/s ± 1%
+SVG/sample_gopher.svg-4 26.7MB/s ± 0%
+SVG/sample_usa.svg-4 30.9MB/s ± 3%
+XML/sample_books.xml-4 122MB/s ± 0%
+XML/sample_catalog.xml-4 130MB/s ± 0%
+XML/sample_omg.xml-4 180MB/s ± 1%
```
## HTML
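As an aside to the README excerpt above: the "core functionality associates mimetypes with minification functions" paragraph is easiest to see in code. A minimal sketch against the vendored v1 import paths (the input string is purely illustrative):

```go
package main

import (
	"fmt"
	"log"

	"github.com/tdewolff/minify"
	"github.com/tdewolff/minify/html"
)

func main() {
	m := minify.New()
	// Associate the HTML minifier with its mimetype; CSS, JS, and other
	// minifiers for embedded resources could be registered the same way.
	m.AddFunc("text/html", html.Minify)

	out, err := m.String("text/html", "<p>  some   <b>  text  </b>  </p>")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(out)
}
```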
diff --git a/vendor/github.com/tdewolff/minify/html/html_test.go b/vendor/github.com/tdewolff/minify/html/html_test.go
index 9201c40..932dcf7 100644
--- a/vendor/github.com/tdewolff/minify/html/html_test.go
+++ b/vendor/github.com/tdewolff/minify/html/html_test.go
@@ -116,6 +116,11 @@ func TestHTML(t *testing.T) {
{`<meta e t n content=ful><a b`, `<meta e t n content=ful><a b>`},
{`<img alt=a'b="">`, `<img alt='a&#39;b=""'>`},
{`</b`, `</b`},
+ {`<title></`, `<title></`},
+ {`<svg <`, `<svg <`},
+ {`<svg "`, `<svg "`},
+ {`<svg></`, `<svg></`},
+ {`<script><!--<`, `<script><!--<`},
// bugs
{`<p>text</p><br>text`, `<p>text</p><br>text`}, // #122
diff --git a/vendor/github.com/tdewolff/minify/js/js_test.go b/vendor/github.com/tdewolff/minify/js/js_test.go
index 816ce90..d076725 100644
--- a/vendor/github.com/tdewolff/minify/js/js_test.go
+++ b/vendor/github.com/tdewolff/minify/js/js_test.go
@@ -40,6 +40,9 @@ func TestJS(t *testing.T) {
{"false\n\"string\"", "false\n\"string\""}, // #109
{"`\n", "`"}, // go fuzz
{"a\n~b", "a\n~b"}, // #132
+
+ // go-fuzz
+ {`/\`, `/\`},
}
m := minify.New()
diff --git a/vendor/github.com/tdewolff/minify/xml/xml.go b/vendor/github.com/tdewolff/minify/xml/xml.go
index 7974484..9486e34 100644
--- a/vendor/github.com/tdewolff/minify/xml/xml.go
+++ b/vendor/github.com/tdewolff/minify/xml/xml.go
@@ -43,6 +43,9 @@ func (o *Minifier) Minify(m *minify.M, w io.Writer, r io.Reader, _ map[string]st
for {
t := *tb.Shift()
if t.TokenType == xml.CDATAToken {
+ if len(t.Text) == 0 {
+ continue
+ }
if text, useText := xml.EscapeCDATAVal(&attrByteBuffer, t.Text); useText {
t.TokenType = xml.TextToken
t.Data = text
diff --git a/vendor/github.com/tdewolff/minify/xml/xml_test.go b/vendor/github.com/tdewolff/minify/xml/xml_test.go
index 5d99809..a87f9c6 100644
--- a/vendor/github.com/tdewolff/minify/xml/xml_test.go
+++ b/vendor/github.com/tdewolff/minify/xml/xml_test.go
@@ -39,7 +39,10 @@ func TestXML(t *testing.T) {
{"<style>lala{color:red}</style>", "<style>lala{color:red}</style>"},
{`cats and dogs `, `cats and dogs`},
- {`</0`, `</0`}, // go fuzz
+ // go fuzz
+ {`</0`, `</0`},
+ {`<!DOCTYPE`, `<!DOCTYPE`},
+ {`<![CDATA[`, ``},
}
m := minify.New()
diff --git a/vendor/github.com/tdewolff/parse/buffer/lexer.go b/vendor/github.com/tdewolff/parse/buffer/lexer.go
index 4bfb35a..45c1844 100644
--- a/vendor/github.com/tdewolff/parse/buffer/lexer.go
+++ b/vendor/github.com/tdewolff/parse/buffer/lexer.go
@@ -81,9 +81,14 @@ func (z *Lexer) Restore() {
// Err returns the error returned from io.Reader or io.EOF when the end has been reached.
func (z *Lexer) Err() error {
+ return z.PeekErr(0)
+}
+
+// PeekErr returns the error at position pos. When pos is zero, this is the same as calling Err().
+func (z *Lexer) PeekErr(pos int) error {
if z.err != nil {
return z.err
- } else if z.pos >= len(z.buf)-1 {
+ } else if z.pos+pos >= len(z.buf)-1 {
return io.EOF
}
return nil
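A small, self-contained sketch of the new PeekErr (the input string is illustrative): it reports the error state at a lookahead offset without advancing, which the HTML lexer later in this diff uses to tell a literal NULL byte apart from the zero byte that pads the end of the buffer.

```go
package main

import (
	"bytes"
	"fmt"
	"io"

	"github.com/tdewolff/parse/buffer"
)

func main() {
	z := buffer.NewLexer(bytes.NewBufferString("ab"))

	// At the start of the buffer there is no error yet...
	fmt.Println(z.Err()) // <nil>
	// ...but peeking two bytes ahead already runs past the end.
	fmt.Println(z.PeekErr(2) == io.EOF) // true
}
```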
diff --git a/vendor/github.com/tdewolff/parse/css/parse_test.go b/vendor/github.com/tdewolff/parse/css/parse_test.go
index 33f6f5f..9871854 100644
--- a/vendor/github.com/tdewolff/parse/css/parse_test.go
+++ b/vendor/github.com/tdewolff/parse/css/parse_test.go
@@ -174,7 +174,8 @@ func TestParseError(t *testing.T) {
if tt.col == 0 {
test.T(t, p.Err(), io.EOF)
} else if perr, ok := p.Err().(*parse.Error); ok {
- test.T(t, perr.Col, tt.col)
+ _, col, _ := perr.Position()
+ test.T(t, col, tt.col)
} else {
test.Fail(t, "bad error:", p.Err())
}
diff --git a/vendor/github.com/tdewolff/parse/error.go b/vendor/github.com/tdewolff/parse/error.go
index 1e85f9b..e790379 100644
--- a/vendor/github.com/tdewolff/parse/error.go
+++ b/vendor/github.com/tdewolff/parse/error.go
@@ -7,29 +7,43 @@ import (
"github.com/tdewolff/parse/buffer"
)
+// Error is a parsing error returned by parser. It contains a message and an offset at which the error occurred.
type Error struct {
Message string
- Line int
- Col int
- Context string
+ r io.Reader
+ Offset int
+ line int
+ column int
+ context string
}
+// NewError creates a new error
func NewError(msg string, r io.Reader, offset int) *Error {
- line, col, context, _ := Position(r, offset)
return &Error{
- msg,
- line,
- col,
- context,
+ Message: msg,
+ r: r,
+ Offset: offset,
}
}
+// NewErrorLexer creates a new error from a *buffer.Lexer
func NewErrorLexer(msg string, l *buffer.Lexer) *Error {
r := buffer.NewReader(l.Bytes())
offset := l.Offset()
return NewError(msg, r, offset)
}
+// Position re-parses the file to determine the line, column, and context of the error.
+// Context is the entire line at which the error occurred.
+func (e *Error) Position() (int, int, string) {
+ if e.line == 0 {
+ e.line, e.column, e.context, _ = Position(e.r, e.Offset)
+ }
+ return e.line, e.column, e.context
+}
+
+// Error returns the error string, containing the context and line + column number.
func (e *Error) Error() string {
- return fmt.Sprintf("parse error:%d:%d: %s\n%s", e.Line, e.Col, e.Message, e.Context)
+ line, column, context := e.Position()
+ return fmt.Sprintf("parse error:%d:%d: %s\n%s", line, column, e.Message, context)
}
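Since Line and Col are no longer exported, callers now go through Position(), which computes line, column, and context lazily from the stored reader and offset. A runnable sketch using the HTML lexer updated later in this diff; the input string is taken from its new error tests:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/tdewolff/parse"
	"github.com/tdewolff/parse/html"
)

func main() {
	// Per the updated lexer tests, a NULL byte inside <svg> still raises an error.
	l := html.NewLexer(bytes.NewBufferString("<svg>\x00</svg>"))
	for {
		tt, _ := l.Next()
		if tt == html.ErrorToken {
			break
		}
	}
	if perr, ok := l.Err().(*parse.Error); ok {
		// Line and column are only computed here, when Position is called.
		line, col, context := perr.Position()
		fmt.Printf("parse error at %d:%d: %s\n%s\n", line, col, perr.Message, context)
	}
}
```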
diff --git a/vendor/github.com/tdewolff/parse/html/lex.go b/vendor/github.com/tdewolff/parse/html/lex.go
index c81490a..b191247 100644
--- a/vendor/github.com/tdewolff/parse/html/lex.go
+++ b/vendor/github.com/tdewolff/parse/html/lex.go
@@ -79,10 +79,10 @@ func NewLexer(r io.Reader) *Lexer {
// Err returns the error encountered during lexing, this is often io.EOF but also other errors can be returned.
func (l *Lexer) Err() error {
- if err := l.r.Err(); err != nil {
- return err
+ if l.err != nil {
+ return l.err
}
- return l.err
+ return l.r.Err()
}
// Restore restores the NULL byte at the end of the buffer.
@@ -103,8 +103,7 @@ func (l *Lexer) Next() (TokenType, []byte) {
}
break
}
- if c == 0 {
- l.err = parse.NewErrorLexer("unexpected null character", l.r)
+ if c == 0 && l.r.Err() != nil {
return ErrorToken, nil
} else if c != '>' && (c != '/' || l.r.Peek(1) != '>') {
return AttributeToken, l.shiftAttribute()
@@ -133,13 +132,16 @@ func (l *Lexer) Next() (TokenType, []byte) {
c = l.r.Peek(0)
if c == '<' {
c = l.r.Peek(1)
+ isEndTag := c == '/' && l.r.Peek(2) != '>' && (l.r.Peek(2) != 0 || l.r.PeekErr(2) == nil)
if l.r.Pos() > 0 {
- if c == '/' && l.r.Peek(2) != 0 || 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '!' || c == '?' {
+ if isEndTag || 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '!' || c == '?' {
+ // return currently buffered texttoken so that we can return tag next iteration
return TextToken, l.r.Shift()
}
- } else if c == '/' && l.r.Peek(2) != 0 {
+ } else if isEndTag {
l.r.Move(2)
- if c = l.r.Peek(0); c != '>' && !('a' <= c && c <= 'z' || 'A' <= c && c <= 'Z') {
+ // only endtags that are not followed by > or EOF arrive here
+ if c = l.r.Peek(0); !('a' <= c && c <= 'z' || 'A' <= c && c <= 'Z') {
return CommentToken, l.shiftBogusComment()
}
return EndTagToken, l.shiftEndTag()
@@ -154,11 +156,10 @@ func (l *Lexer) Next() (TokenType, []byte) {
l.r.Move(1)
return CommentToken, l.shiftBogusComment()
}
- } else if c == 0 {
+ } else if c == 0 && l.r.Err() != nil {
if l.r.Pos() > 0 {
return TextToken, l.r.Shift()
}
- l.err = parse.NewErrorLexer("unexpected null character", l.r)
return ErrorToken, nil
}
l.r.Move(1)
@@ -182,7 +183,7 @@ func (l *Lexer) AttrVal() []byte {
func (l *Lexer) shiftRawText() []byte {
if l.rawTag == Plaintext {
for {
- if l.r.Peek(0) == 0 {
+ if l.r.Peek(0) == 0 && l.r.Err() != nil {
return l.r.Shift()
}
l.r.Move(1)
@@ -237,15 +238,16 @@ func (l *Lexer) shiftRawText() []byte {
inScript = false
}
}
- } else if c == 0 {
+ } else if c == 0 && l.r.Err() != nil {
return l.r.Shift()
+ } else {
+ l.r.Move(1)
}
- l.r.Move(1)
}
} else {
l.r.Move(1)
}
- } else if c == 0 {
+ } else if c == 0 && l.r.Err() != nil {
return l.r.Shift()
} else {
l.r.Move(1)
@@ -258,7 +260,7 @@ func (l *Lexer) readMarkup() (TokenType, []byte) {
if l.at('-', '-') {
l.r.Move(2)
for {
- if l.r.Peek(0) == 0 {
+ if l.r.Peek(0) == 0 && l.r.Err() != nil {
return CommentToken, l.r.Shift()
} else if l.at('-', '-', '>') {
l.text = l.r.Lexeme()[4:]
@@ -274,7 +276,7 @@ func (l *Lexer) readMarkup() (TokenType, []byte) {
} else if l.at('[', 'C', 'D', 'A', 'T', 'A', '[') {
l.r.Move(7)
for {
- if l.r.Peek(0) == 0 {
+ if l.r.Peek(0) == 0 && l.r.Err() != nil {
return TextToken, l.r.Shift()
} else if l.at(']', ']', '>') {
l.r.Move(3)
@@ -289,7 +291,7 @@ func (l *Lexer) readMarkup() (TokenType, []byte) {
l.r.Move(1)
}
for {
- if c := l.r.Peek(0); c == '>' || c == 0 {
+ if c := l.r.Peek(0); c == '>' || c == 0 && l.r.Err() != nil {
l.text = l.r.Lexeme()[9:]
if c == '>' {
l.r.Move(1)
@@ -310,7 +312,7 @@ func (l *Lexer) shiftBogusComment() []byte {
l.text = l.r.Lexeme()[2:]
l.r.Move(1)
return l.r.Shift()
- } else if c == 0 {
+ } else if c == 0 && l.r.Err() != nil {
l.text = l.r.Lexeme()[2:]
return l.r.Shift()
}
@@ -320,19 +322,25 @@ func (l *Lexer) shiftBogusComment() []byte {
func (l *Lexer) shiftStartTag() (TokenType, []byte) {
for {
- if c := l.r.Peek(0); c == ' ' || c == '>' || c == '/' && l.r.Peek(1) == '>' || c == '\t' || c == '\n' || c == '\r' || c == '\f' || c == 0 {
+ if c := l.r.Peek(0); c == ' ' || c == '>' || c == '/' && l.r.Peek(1) == '>' || c == '\t' || c == '\n' || c == '\r' || c == '\f' || c == 0 && l.r.Err() != nil {
break
}
l.r.Move(1)
}
l.text = parse.ToLower(l.r.Lexeme()[1:])
if h := ToHash(l.text); h == Textarea || h == Title || h == Style || h == Xmp || h == Iframe || h == Script || h == Plaintext || h == Svg || h == Math {
- if h == Svg {
- l.inTag = false
- return SvgToken, l.shiftXml(h)
- } else if h == Math {
+ if h == Svg || h == Math {
+ data := l.shiftXml(h)
+ if l.err != nil {
+ return ErrorToken, nil
+ }
+
l.inTag = false
- return MathToken, l.shiftXml(h)
+ if h == Svg {
+ return SvgToken, data
+ } else {
+ return MathToken, data
+ }
}
l.rawTag = h
}
@@ -343,7 +351,7 @@ func (l *Lexer) shiftAttribute() []byte {
nameStart := l.r.Pos()
var c byte
for { // attribute name state
- if c = l.r.Peek(0); c == ' ' || c == '=' || c == '>' || c == '/' && l.r.Peek(1) == '>' || c == '\t' || c == '\n' || c == '\r' || c == '\f' || c == 0 {
+ if c = l.r.Peek(0); c == ' ' || c == '=' || c == '>' || c == '/' && l.r.Peek(1) == '>' || c == '\t' || c == '\n' || c == '\r' || c == '\f' || c == 0 && l.r.Err() != nil {
break
}
l.r.Move(1)
@@ -374,14 +382,14 @@ func (l *Lexer) shiftAttribute() []byte {
if c == delim {
l.r.Move(1)
break
- } else if c == 0 {
+ } else if c == 0 && l.r.Err() != nil {
break
}
l.r.Move(1)
}
} else { // attribute value unquoted state
for {
- if c := l.r.Peek(0); c == ' ' || c == '>' || c == '\t' || c == '\n' || c == '\r' || c == '\f' || c == 0 {
+ if c := l.r.Peek(0); c == ' ' || c == '>' || c == '\t' || c == '\n' || c == '\r' || c == '\f' || c == 0 && l.r.Err() != nil {
break
}
l.r.Move(1)
@@ -403,7 +411,7 @@ func (l *Lexer) shiftEndTag() []byte {
l.text = l.r.Lexeme()[2:]
l.r.Move(1)
break
- } else if c == 0 {
+ } else if c == 0 && l.r.Err() != nil {
l.text = l.r.Lexeme()[2:]
break
}
@@ -422,6 +430,8 @@ func (l *Lexer) shiftEndTag() []byte {
return parse.ToLower(l.r.Shift())
}
+// shiftXml parses the content of a svg or math tag according to the XML 1.1 specifications, including the tag itself.
+// So far we have already parsed `<svg` or `<math`.
func (l *Lexer) shiftXml(rawTag Hash) []byte {
inQuote := false
for {
@@ -429,26 +439,26 @@ func (l *Lexer) shiftXml(rawTag Hash) []byte {
if c == '"' {
inQuote = !inQuote
l.r.Move(1)
- } else if c == '<' && !inQuote {
- if l.r.Peek(1) == '/' {
- mark := l.r.Pos()
- l.r.Move(2)
- for {
- if c = l.r.Peek(0); !('a' <= c && c <= 'z' || 'A' <= c && c <= 'Z') {
- break
- }
- l.r.Move(1)
- }
- if h := ToHash(parse.ToLower(parse.Copy(l.r.Lexeme()[mark+2:]))); h == rawTag { // copy so that ToLower doesn't change the case of the underlying slice
+ } else if c == '<' && !inQuote && l.r.Peek(1) == '/' {
+ mark := l.r.Pos()
+ l.r.Move(2)
+ for {
+ if c = l.r.Peek(0); !('a' <= c && c <= 'z' || 'A' <= c && c <= 'Z') {
break
}
- } else {
l.r.Move(1)
}
+ if h := ToHash(parse.ToLower(parse.Copy(l.r.Lexeme()[mark+2:]))); h == rawTag { // copy so that ToLower doesn't change the case of the underlying slice
+ break
+ }
} else if c == 0 {
+ if l.r.Err() == nil {
+ l.err = parse.NewErrorLexer("unexpected null character", l.r)
+ }
return l.r.Shift()
+ } else {
+ l.r.Move(1)
}
- l.r.Move(1)
}
for {
@@ -457,7 +467,10 @@ func (l *Lexer) shiftXml(rawTag Hash) []byte {
l.r.Move(1)
break
} else if c == 0 {
- break
+ if l.r.Err() == nil {
+ l.err = parse.NewErrorLexer("unexpected null character", l.r)
+ }
+ return l.r.Shift()
}
l.r.Move(1)
}
diff --git a/vendor/github.com/tdewolff/parse/html/lex_test.go b/vendor/github.com/tdewolff/parse/html/lex_test.go
index 5f4ca0b..51d9cfa 100644
--- a/vendor/github.com/tdewolff/parse/html/lex_test.go
+++ b/vendor/github.com/tdewolff/parse/html/lex_test.go
@@ -63,8 +63,22 @@ func TestTokens(t *testing.T) {
{"<script><!--", TTs{StartTagToken, StartTagCloseToken, TextToken}},
{"<script><!--var x='<script></script>';-->", TTs{StartTagToken, StartTagCloseToken, TextToken}},
+ // NULL
+ {"foo\x00bar", TTs{TextToken}},
+ {"<\x00foo>", TTs{TextToken}},
+ {"<foo\x00>", TTs{StartTagToken, StartTagCloseToken}},
+ {"</\x00bogus>", TTs{CommentToken}},
+ {"</foo\x00>", TTs{EndTagToken}},
+ {"<plaintext>\x00</plaintext>", TTs{StartTagToken, StartTagCloseToken, TextToken}},
+ {"<script>\x00</script>", TTs{StartTagToken, StartTagCloseToken, TextToken, EndTagToken}},
+ {"<!--\x00-->", TTs{CommentToken}},
+ {"<![CDATA[\x00]]>", TTs{TextToken}},
+ {"<!doctype\x00>", TTs{DoctypeToken}},
+ {"<?bogus\x00>", TTs{CommentToken}},
+ {"<?bogus\x00>", TTs{CommentToken}},
+
// go-fuzz
- {"</>", TTs{EndTagToken}},
+ {"</>", TTs{TextToken}},
}
for _, tt := range tokenTests {
t.Run(tt.html, func(t *testing.T) {
@@ -135,6 +149,11 @@ func TestAttributes(t *testing.T) {
{"<foo x", []string{"x", ""}},
{"<foo x=", []string{"x", ""}},
{"<foo x='", []string{"x", "'"}},
+
+ // NULL
+ {"<foo \x00>", []string{"\x00", ""}},
+ {"<foo \x00=\x00>", []string{"\x00", "\x00"}},
+ {"<foo \x00='\x00'>", []string{"\x00", "'\x00'"}},
}
for _, tt := range attributeTests {
t.Run(tt.attr, func(t *testing.T) {
@@ -164,7 +183,8 @@ func TestErrors(t *testing.T) {
html string
col int
}{
- {"a\x00b", 2},
+ {"<svg>\x00</svg>", 6},
+ {"<svg></svg\x00>", 11},
}
for _, tt := range errorTests {
t.Run(tt.html, func(t *testing.T) {
@@ -175,7 +195,8 @@ func TestErrors(t *testing.T) {
if tt.col == 0 {
test.T(t, l.Err(), io.EOF)
} else if perr, ok := l.Err().(*parse.Error); ok {
- test.T(t, perr.Col, tt.col)
+ _, col, _ := perr.Position()
+ test.T(t, col, tt.col)
} else {
test.Fail(t, "bad error:", l.Err())
}
diff --git a/vendor/github.com/tdewolff/parse/js/lex.go b/vendor/github.com/tdewolff/parse/js/lex.go
index 96ffcd8..3ee73e7 100644
--- a/vendor/github.com/tdewolff/parse/js/lex.go
+++ b/vendor/github.com/tdewolff/parse/js/lex.go
@@ -599,6 +599,8 @@ func (l *Lexer) consumeRegexpToken() bool {
if l.consumeLineTerminator() {
l.r.Rewind(mark)
return false
+ } else if l.r.Peek(0) == 0 {
+ return true
}
} else if l.consumeLineTerminator() {
l.r.Rewind(mark)
diff --git a/vendor/github.com/tdewolff/parse/json/parse.go b/vendor/github.com/tdewolff/parse/json/parse.go
index ae133f2..5c78af8 100644
--- a/vendor/github.com/tdewolff/parse/json/parse.go
+++ b/vendor/github.com/tdewolff/parse/json/parse.go
@@ -99,10 +99,10 @@ func NewParser(r io.Reader) *Parser {
// Err returns the error encountered during tokenization, this is often io.EOF but also other errors can be returned.
func (p *Parser) Err() error {
- if err := p.r.Err(); err != nil {
- return err
+ if p.err != nil {
+ return p.err
}
- return p.err
+ return p.r.Err()
}
// Restore restores the NULL byte at the end of the buffer.
diff --git a/vendor/github.com/tdewolff/parse/json/parse_test.go b/vendor/github.com/tdewolff/parse/json/parse_test.go
index 6ea28d1..354a93e 100644
--- a/vendor/github.com/tdewolff/parse/json/parse_test.go
+++ b/vendor/github.com/tdewolff/parse/json/parse_test.go
@@ -93,7 +93,8 @@ func TestGrammarsError(t *testing.T) {
if tt.col == 0 {
test.T(t, p.Err(), io.EOF)
} else if perr, ok := p.Err().(*parse.Error); ok {
- test.T(t, perr.Col, tt.col)
+ _, col, _ := perr.Position()
+ test.T(t, col, tt.col)
} else {
test.Fail(t, "bad error:", p.Err())
}
diff --git a/vendor/github.com/tdewolff/parse/xml/lex.go b/vendor/github.com/tdewolff/parse/xml/lex.go
index 0f1393c..c7ea4d1 100644
--- a/vendor/github.com/tdewolff/parse/xml/lex.go
+++ b/vendor/github.com/tdewolff/parse/xml/lex.go
@@ -81,11 +81,10 @@ func NewLexer(r io.Reader) *Lexer {
// Err returns the error encountered during lexing, this is often io.EOF but also other errors can be returned.
func (l *Lexer) Err() error {
- err := l.r.Err()
- if err != nil {
- return err
+ if l.err != nil {
+ return l.err
}
- return l.err
+ return l.r.Err()
}
// Restore restores the NULL byte at the end of the buffer.
@@ -107,7 +106,9 @@ func (l *Lexer) Next() (TokenType, []byte) {
break
}
if c == 0 {
- l.err = parse.NewErrorLexer("unexpected null character", l.r)
+ if l.r.Err() == nil {
+ l.err = parse.NewErrorLexer("unexpected null character", l.r)
+ }
return ErrorToken, nil
} else if c != '>' && (c != '/' && c != '?' || l.r.Peek(1) != '>') {
return AttributeToken, l.shiftAttribute()
@@ -148,7 +149,7 @@ func (l *Lexer) Next() (TokenType, []byte) {
l.r.Move(7)
return CDATAToken, l.shiftCDATAText()
} else if l.at('D', 'O', 'C', 'T', 'Y', 'P', 'E') {
- l.r.Move(8)
+ l.r.Move(7)
return DOCTYPEToken, l.shiftDOCTYPEText()
}
l.r.Move(-2)
@@ -164,7 +165,9 @@ func (l *Lexer) Next() (TokenType, []byte) {
if l.r.Pos() > 0 {
return TextToken, l.r.Shift()
}
- l.err = parse.NewErrorLexer("unexpected null character", l.r)
+ if l.r.Err() == nil {
+ l.err = parse.NewErrorLexer("unexpected null character", l.r)
+ }
return ErrorToken, nil
}
l.r.Move(1)
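One of the quieter fixes above is the Move(8) to Move(7) change: "DOCTYPE" is seven characters, so the old code moved one byte too far and, on truncated input such as the `<!DOCTYPE` go-fuzz case added in the minify tests, could step past the end of the buffer. A hedged sketch exercising the doctype path (input is illustrative):

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/tdewolff/parse/xml"
)

func main() {
	// With the off-by-one fixed, the byte after "DOCTYPE" is no longer skipped.
	l := xml.NewLexer(bytes.NewBufferString(`<!DOCTYPE note SYSTEM "note.dtd">`))
	for {
		tt, data := l.Next()
		if tt == xml.ErrorToken {
			break
		}
		fmt.Printf("%v: %q\n", tt, data)
	}
}
```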
diff --git a/vendor/github.com/tdewolff/parse/xml/lex_test.go b/vendor/github.com/tdewolff/parse/xml/lex_test.go
index f8cdd17..d7d0e4a 100644
--- a/vendor/github.com/tdewolff/parse/xml/lex_test.go
+++ b/vendor/github.com/tdewolff/parse/xml/lex_test.go
@@ -155,6 +155,7 @@ func TestErrors(t *testing.T) {
col int
}{
{"a\x00b", 2},
+ {"<a\x00>", 3},
}
for _, tt := range errorTests {
t.Run(tt.xml, func(t *testing.T) {
@@ -165,7 +166,8 @@ func TestErrors(t *testing.T) {
if tt.col == 0 {
test.T(t, l.Err(), io.EOF)
} else if perr, ok := l.Err().(*parse.Error); ok {
- test.T(t, perr.Col, tt.col)
+ _, col, _ := perr.Position()
+ test.T(t, col, tt.col)
} else {
test.Fail(t, "bad error:", l.Err())
}
diff --git a/vendor/github.com/tomasen/realip/.travis.yml b/vendor/github.com/tomasen/realip/.travis.yml
index 9c90009..fdfbf87 100644
--- a/vendor/github.com/tomasen/realip/.travis.yml
+++ b/vendor/github.com/tomasen/realip/.travis.yml
@@ -1,8 +1,6 @@
language: go
go:
- - 1.4
- - 1.5
- tip
before_install:
diff --git a/vendor/github.com/tomasen/realip/README.md b/vendor/github.com/tomasen/realip/README.md
index 3eea89a..085f182 100644
--- a/vendor/github.com/tomasen/realip/README.md
+++ b/vendor/github.com/tomasen/realip/README.md
@@ -1,12 +1,27 @@
-a golang library that can get client's real public ip address from http request headers
+# RealIP
-[![Build Status](https://travis-ci.org/tomasen/realip.svg?branch=master)](https://travis-ci.org/Tomasen/realip)
[![GoDoc](https://godoc.org/github.com/Tomasen/realip?status.svg)](http://godoc.org/github.com/Tomasen/realip)
+Go package that can be used to get the client's real public IP address, which is usually useful for logging in HTTP servers.
-* follow the rule of X-FORWARDED-FOR/rfc7239
-* follow the rule of X-Real-Ip
-* lan/intranet IP address filtered
+### Feature
+
+* Follows the rule of X-Real-IP
+* Follows the rule of X-Forwarded-For
+* Exclude local or private address
+
+## Example
+
+```go
+package main
+
+import "github.com/Tomasen/realip"
+
+func (h *Handler) ServeIndexPage(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
+ clientIP := realip.FromRequest(r)
+ log.Println("GET / from", clientIP)
+}
+```
## Developing
diff --git a/vendor/github.com/tomasen/realip/realip.go b/vendor/github.com/tomasen/realip/realip.go
index 09ed74d..e2803a2 100644
--- a/vendor/github.com/tomasen/realip/realip.go
+++ b/vendor/github.com/tomasen/realip/realip.go
@@ -1,7 +1,7 @@
package realip
import (
- "log"
+ "errors"
"net"
"net/http"
"strings"
@@ -10,62 +10,80 @@ import (
var cidrs []*net.IPNet
func init() {
- lancidrs := []string{
- "127.0.0.1/8", "10.0.0.0/8", "169.254.0.0/16", "172.16.0.0/12", "192.168.0.0/16", "::1/128", "fc00::/7",
+ maxCidrBlocks := []string{
+ "127.0.0.1/8", // localhost
+ "10.0.0.0/8", // 24-bit block
+ "172.16.0.0/12", // 20-bit block
+ "192.168.0.0/16", // 16-bit block
+ "169.254.0.0/16", // link local address
+ "::1/128", // localhost IPv6
+ "fc00::/7", // unique local address IPv6
+ "fe80::/10", // link local address IPv6
}
- cidrs = make([]*net.IPNet, len(lancidrs))
-
- for i, it := range lancidrs {
- _, cidrnet, err := net.ParseCIDR(it)
- if err != nil {
- log.Fatalf("ParseCIDR error: %v", err) // assuming I did it right above
- }
-
- cidrs[i] = cidrnet
+ cidrs = make([]*net.IPNet, len(maxCidrBlocks))
+ for i, maxCidrBlock := range maxCidrBlocks {
+ _, cidr, _ := net.ParseCIDR(maxCidrBlock)
+ cidrs[i] = cidr
}
}
-func isLocalAddress(addr string) bool {
+// isPrivateAddress works by checking if the address is under private CIDR blocks.
+// List of private CIDR blocks can be seen on :
+//
+// https://en.wikipedia.org/wiki/Private_network
+//
+// https://en.wikipedia.org/wiki/Link-local_address
+func isPrivateAddress(address string) (bool, error) {
+ ipAddress := net.ParseIP(address)
+ if ipAddress == nil {
+ return false, errors.New("address is not valid")
+ }
+
for i := range cidrs {
- myaddr := net.ParseIP(addr)
- if cidrs[i].Contains(myaddr) {
- return true
+ if cidrs[i].Contains(ipAddress) {
+ return true, nil
}
}
- return false
+ return false, nil
}
-// Request.RemoteAddress contains port, which we want to remove i.e.:
-// "[::1]:58292" => "[::1]"
-func ipAddrFromRemoteAddr(s string) string {
- idx := strings.LastIndex(s, ":")
- if idx == -1 {
- return s
- }
- return s[:idx]
-}
+// FromRequest return client's real public IP address from http request headers.
+func FromRequest(r *http.Request) string {
+ // Fetch header value
+ xRealIP := r.Header.Get("X-Real-Ip")
+ xForwardedFor := r.Header.Get("X-Forwarded-For")
-// RealIP return client's real public IP address
-// from http request headers.
-func RealIP(r *http.Request) string {
- hdr := r.Header
- hdrRealIP := hdr.Get("X-Real-Ip")
- hdrForwardedFor := hdr.Get("X-Forwarded-For")
+ // If both empty, return IP from remote address
+ if xRealIP == "" && xForwardedFor == "" {
+ var remoteIP string
+
+ // If there are colon in remote address, remove the port number
+ // otherwise, return remote address as is
+ if strings.ContainsRune(r.RemoteAddr, ':') {
+ remoteIP, _, _ = net.SplitHostPort(r.RemoteAddr)
+ } else {
+ remoteIP = r.RemoteAddr
+ }
- if len(hdrForwardedFor) == 0 && len(hdrRealIP) == 0 {
- return ipAddrFromRemoteAddr(r.RemoteAddr)
+ return remoteIP
}
- // X-Forwarded-For is potentially a list of addresses separated with ","
- for _, addr := range strings.Split(hdrForwardedFor, ",") {
- // return first non-local address
- addr = strings.TrimSpace(addr)
- if len(addr) > 0 && !isLocalAddress(addr) {
- return addr
+ // Check list of IP in X-Forwarded-For and return the first global address
+ for _, address := range strings.Split(xForwardedFor, ",") {
+ address = strings.TrimSpace(address)
+ isPrivate, err := isPrivateAddress(address)
+ if !isPrivate && err == nil {
+ return address
}
}
- return hdrRealIP
+ // If nothing succeed, return X-Real-IP
+ return xRealIP
+}
+
+// RealIP is deprecated, use FromRequest instead
+func RealIP(r *http.Request) string {
+ return FromRequest(r)
}
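A short usage sketch of the reworked lookup (the header value is illustrative); note that RealIP is kept only as a thin wrapper around FromRequest:

```go
package main

import (
	"fmt"
	"net/http/httptest"

	"github.com/Tomasen/realip"
)

func main() {
	// Build an illustrative request: a private proxy hop followed by the
	// client's public address in X-Forwarded-For.
	r := httptest.NewRequest("GET", "/", nil)
	r.Header.Set("X-Forwarded-For", "192.168.1.10, 144.12.54.87")

	// The first non-private entry in the list wins.
	fmt.Println(realip.FromRequest(r)) // 144.12.54.87
}
```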
diff --git a/vendor/github.com/tomasen/realip/realip_test.go b/vendor/github.com/tomasen/realip/realip_test.go
index e301ee8..e80efe0 100644
--- a/vendor/github.com/tomasen/realip/realip_test.go
+++ b/vendor/github.com/tomasen/realip/realip_test.go
@@ -2,11 +2,10 @@ package realip
import (
"net/http"
- "strings"
"testing"
)
-func TestIsLocalAddr(t *testing.T) {
+func TestIsPrivateAddr(t *testing.T) {
testData := map[string]bool{
"127.0.0.0": true,
"10.0.0.0": true,
@@ -24,7 +23,12 @@ func TestIsLocalAddr(t *testing.T) {
}
for addr, isLocal := range testData {
- if isLocalAddress(addr) != isLocal {
+ isPrivate, err := isPrivateAddress(addr)
+ if err != nil {
+ t.Errorf("fail processing %s: %v", addr, err)
+ }
+
+ if isPrivate != isLocal {
format := "%s should "
if !isLocal {
format += "not "
@@ -36,51 +40,56 @@ func TestIsLocalAddr(t *testing.T) {
}
}
-func TestIpAddrFromRemoteAddr(t *testing.T) {
- testData := map[string]string{
- "127.0.0.1:8888": "127.0.0.1",
- "ip:port": "ip",
- "ip": "ip",
- "12:34::0": "12:34:",
+func TestRealIP(t *testing.T) {
+ // Create type and function for testing
+ type testIP struct {
+ name string
+ request *http.Request
+ expected string
}
- for remoteAddr, expectedAddr := range testData {
- if actualAddr := ipAddrFromRemoteAddr(remoteAddr); actualAddr != expectedAddr {
- t.Errorf("ipAddrFromRemoteAddr of %s should be %s but get %s", remoteAddr, expectedAddr, actualAddr)
+ newRequest := func(remoteAddr, xRealIP string, xForwardedFor ...string) *http.Request {
+ h := http.Header{}
+ h.Set("X-Real-IP", xRealIP)
+ for _, address := range xForwardedFor {
+ h.Set("X-Forwarded-For", address)
}
- }
-}
-func TestRealIP(t *testing.T) {
- newRequest := func(remoteAddr, hdrRealIP, hdrForwardedFor string) *http.Request {
- h := http.Header{}
- h["X-Real-Ip"] = []string{hdrRealIP}
- h["X-Forwarded-For"] = []string{hdrForwardedFor}
return &http.Request{
RemoteAddr: remoteAddr,
Header: h,
}
}
- remoteAddr := "144.12.54.87"
- anotherRemoteAddr := "119.14.55.11"
+ // Create test data
+ publicAddr1 := "144.12.54.87"
+ publicAddr2 := "119.14.55.11"
localAddr := "127.0.0.0"
- testData := []struct {
- expected string
- request *http.Request
- }{
- {remoteAddr, newRequest(remoteAddr, "", "")}, // no header
- {remoteAddr, newRequest("", "", remoteAddr)}, // X-Forwarded-For: remoteAddr
- {remoteAddr, newRequest("", remoteAddr, "")}, // X-RealIP: remoteAddr
-
- // X-Forwarded-For: localAddr, remoteAddr, anotherRemoteAddr
- {remoteAddr, newRequest("", "", strings.Join([]string{localAddr, remoteAddr, anotherRemoteAddr}, ", "))},
+ testData := []testIP{
+ {
+ name: "No header",
+ request: newRequest(publicAddr1, ""),
+ expected: publicAddr1,
+ }, {
+ name: "Has X-Forwarded-For",
+ request: newRequest("", "", publicAddr1),
+ expected: publicAddr1,
+ }, {
+ name: "Has multiple X-Forwarded-For",
+ request: newRequest("", "", localAddr, publicAddr1, publicAddr2),
+ expected: publicAddr2,
+ }, {
+ name: "Has X-Real-IP",
+ request: newRequest("", publicAddr1),
+ expected: publicAddr1,
+ },
}
+ // Run test
for _, v := range testData {
- if actual := RealIP(v.request); v.expected != actual {
- t.Errorf("expected %s but get %s", v.expected, actual)
+ if actual := FromRequest(v.request); v.expected != actual {
+ t.Errorf("%s: expected %s but get %s", v.name, v.expected, actual)
}
}
}