path: root/vendor/github.com/tdewolff/parse/css
Diffstat (limited to 'vendor/github.com/tdewolff/parse/css')
-rw-r--r--  vendor/github.com/tdewolff/parse/css/README.md     | 171
-rw-r--r--  vendor/github.com/tdewolff/parse/css/hash.go        | 676
-rw-r--r--  vendor/github.com/tdewolff/parse/css/hash_test.go   |  16
-rw-r--r--  vendor/github.com/tdewolff/parse/css/lex.go         | 710
-rw-r--r--  vendor/github.com/tdewolff/parse/css/lex_test.go    | 143
-rw-r--r--  vendor/github.com/tdewolff/parse/css/parse.go       | 398
-rw-r--r--  vendor/github.com/tdewolff/parse/css/parse_test.go  | 248
-rw-r--r--  vendor/github.com/tdewolff/parse/css/util.go        |  47
-rw-r--r--  vendor/github.com/tdewolff/parse/css/util_test.go   |  34
9 files changed, 2443 insertions, 0 deletions
diff --git a/vendor/github.com/tdewolff/parse/css/README.md b/vendor/github.com/tdewolff/parse/css/README.md
new file mode 100644
index 0000000..2013248
--- /dev/null
+++ b/vendor/github.com/tdewolff/parse/css/README.md
@@ -0,0 +1,171 @@
+# CSS [![GoDoc](http://godoc.org/github.com/tdewolff/parse/css?status.svg)](http://godoc.org/github.com/tdewolff/parse/css) [![GoCover](http://gocover.io/_badge/github.com/tdewolff/parse/css)](http://gocover.io/github.com/tdewolff/parse/css)
+
+This package is a CSS3 lexer and parser written in [Go][1]. Both follow the specification at [CSS Syntax Module Level 3](http://www.w3.org/TR/css-syntax-3/). The lexer takes an io.Reader and converts it into tokens until EOF. The parser returns a parse tree of the full io.Reader input stream, but the low-level `Next` function can be used for stream parsing, returning grammar units until EOF.
+
+## Installation
+Run the following command
+
+ go get github.com/tdewolff/parse/css
+
+or add the following import and run your project with `go get`
+
+ import "github.com/tdewolff/parse/css"
+
+## Lexer
+### Usage
+The following initializes a new Lexer with io.Reader `r`:
+``` go
+l := css.NewLexer(r)
+```
+
+To tokenize until EOF or an error occurs, use:
+``` go
+for {
+	tt, text := l.Next()
+	switch tt {
+	case css.ErrorToken:
+		// error or EOF set in l.Err()
+		return
+	// ...
+	}
+}
+```
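+
+Identifier text can be compared against the package's predefined hashes (see `hash.go`) instead of using string comparison; a minimal sketch with a hypothetical helper, assuming the same `css` import as above:
+``` go
+// isColorProperty reports whether the token text is the "color" property name.
+// ToHash is case sensitive, so the text must already be lowercase.
+func isColorProperty(text []byte) bool {
+	return css.ToHash(text) == css.Color
+}
+```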
+
+All tokens (see [CSS Syntax Module Level 3](http://www.w3.org/TR/css3-syntax/)):
+``` go
+ErrorToken // non-official token, returned when errors occur
+IdentToken
+FunctionToken // rgb( rgba( ...
+AtKeywordToken // @abc
+HashToken // #abc
+StringToken
+BadStringToken
+URLToken // url(
+BadURLToken
+DelimToken // any unmatched character
+NumberToken // 5
+PercentageToken // 5%
+DimensionToken // 5em
+UnicodeRangeToken
+IncludeMatchToken // ~=
+DashMatchToken // |=
+PrefixMatchToken // ^=
+SuffixMatchToken // $=
+SubstringMatchToken // *=
+ColumnToken // ||
+WhitespaceToken
+CDOToken // <!--
+CDCToken // -->
+ColonToken
+SemicolonToken
+CommaToken
+LeftBracketToken // [
+RightBracketToken // ]
+LeftParenthesisToken // (
+RightParenthesisToken // )
+LeftBraceToken // {
+RightBraceToken // }
+CommentToken // non-official token
+EmptyToken
+CustomPropertyNameToken // --abc
+CustomPropertyValueToken
+```
+
+### Examples
+``` go
+package main
+
+import (
+	"fmt"
+	"io"
+	"os"
+
+	"github.com/tdewolff/parse/css"
+)
+
+// Tokenize CSS3 from stdin.
+func main() {
+	l := css.NewLexer(os.Stdin)
+	for {
+		tt, text := l.Next()
+		switch tt {
+		case css.ErrorToken:
+			if l.Err() != io.EOF {
+				fmt.Println("Error:", l.Err())
+			}
+			return
+		case css.IdentToken:
+			fmt.Println("Identifier", string(text))
+		case css.NumberToken:
+			fmt.Println("Number", string(text))
+			// ...
+		}
+	}
+}
+```
+
+## Parser
+### Usage
+The following creates a new Parser.
+``` go
+// true because this is the content of an inline style attribute
+p := css.NewParser(bytes.NewBufferString("color: red;"), true)
+```
+
+To iterate over the stylesheet, use:
+``` go
+for {
+	gt, _, data := p.Next()
+	if gt == css.ErrorGrammar {
+		break
+	}
+	// ...
+}
+```
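+
+For a `DeclarationGrammar` (and similarly for at-rule and ruleset grammars), the tokens of the value or prelude are available through `p.Values()` after `Next` returns; a minimal sketch of a hypothetical helper that rebuilds a `property:value` string:
+``` go
+// declarationText rebuilds "property:value" from a DeclarationGrammar,
+// where data is the property name just returned by Next.
+func declarationText(p *css.Parser, data []byte) string {
+	s := string(data) + ":"
+	for _, val := range p.Values() {
+		s += string(val.Data)
+	}
+	return s
+}
+```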
+
+All grammar units returned by `Next`:
+``` go
+ErrorGrammar
+CommentGrammar
+AtRuleGrammar
+BeginAtRuleGrammar
+EndAtRuleGrammar
+QualifiedRuleGrammar
+BeginRulesetGrammar
+EndRulesetGrammar
+DeclarationGrammar
+TokenGrammar
+CustomPropertyGrammar
+
+### Examples
+``` go
+package main
+
+import (
+ "bytes"
+ "fmt"
+
+ "github.com/tdewolff/parse/css"
+)
+
+func main() {
+	// true because this is the content of an inline style attribute
+	p := css.NewParser(bytes.NewBufferString("color: red;"), true)
+	out := ""
+	for {
+		gt, _, data := p.Next()
+		if gt == css.ErrorGrammar {
+			break
+		} else if gt == css.AtRuleGrammar || gt == css.BeginAtRuleGrammar || gt == css.BeginRulesetGrammar || gt == css.DeclarationGrammar {
+			out += string(data)
+			if gt == css.DeclarationGrammar {
+				out += ":"
+			}
+			for _, val := range p.Values() {
+				out += string(val.Data)
+			}
+			if gt == css.BeginAtRuleGrammar || gt == css.BeginRulesetGrammar {
+				out += "{"
+			} else if gt == css.AtRuleGrammar || gt == css.DeclarationGrammar {
+				out += ";"
+			}
+		} else {
+			out += string(data)
+		}
+	}
+	fmt.Println(out)
+}
+
+```
+
+## License
+Released under the [MIT license](https://github.com/tdewolff/parse/blob/master/LICENSE.md).
+
+[1]: http://golang.org/ "Go Language"
diff --git a/vendor/github.com/tdewolff/parse/css/hash.go b/vendor/github.com/tdewolff/parse/css/hash.go
new file mode 100644
index 0000000..95595c3
--- /dev/null
+++ b/vendor/github.com/tdewolff/parse/css/hash.go
@@ -0,0 +1,676 @@
+package css
+
+// generated by hasher -type=Hash -file=hash.go; DO NOT EDIT, except for adding more constants to the list and rerun go generate
+
+// uses github.com/tdewolff/hasher
+//go:generate hasher -type=Hash -file=hash.go
+
+// Hash defines perfect hashes for a predefined list of strings
+type Hash uint32
+
+// Unique hash definitions to be used instead of strings
+const (
+ Accelerator Hash = 0x47f0b // accelerator
+ Aliceblue Hash = 0x52509 // aliceblue
+ Alpha Hash = 0x5af05 // alpha
+ Antiquewhite Hash = 0x45c0c // antiquewhite
+ Aquamarine Hash = 0x7020a // aquamarine
+ Azimuth Hash = 0x5b307 // azimuth
+ Background Hash = 0xa // background
+ Background_Attachment Hash = 0x3a15 // background-attachment
+ Background_Color Hash = 0x11c10 // background-color
+ Background_Image Hash = 0x99210 // background-image
+ Background_Position Hash = 0x13 // background-position
+ Background_Position_X Hash = 0x80815 // background-position-x
+ Background_Position_Y Hash = 0x15 // background-position-y
+ Background_Repeat Hash = 0x1511 // background-repeat
+ Behavior Hash = 0x3108 // behavior
+ Black Hash = 0x6005 // black
+ Blanchedalmond Hash = 0x650e // blanchedalmond
+ Blueviolet Hash = 0x52a0a // blueviolet
+ Bold Hash = 0x7a04 // bold
+ Border Hash = 0x8506 // border
+ Border_Bottom Hash = 0x850d // border-bottom
+ Border_Bottom_Color Hash = 0x8513 // border-bottom-color
+ Border_Bottom_Style Hash = 0xbe13 // border-bottom-style
+ Border_Bottom_Width Hash = 0xe113 // border-bottom-width
+ Border_Collapse Hash = 0x1020f // border-collapse
+ Border_Color Hash = 0x1350c // border-color
+ Border_Left Hash = 0x15c0b // border-left
+ Border_Left_Color Hash = 0x15c11 // border-left-color
+ Border_Left_Style Hash = 0x17911 // border-left-style
+ Border_Left_Width Hash = 0x18a11 // border-left-width
+ Border_Right Hash = 0x19b0c // border-right
+ Border_Right_Color Hash = 0x19b12 // border-right-color
+ Border_Right_Style Hash = 0x1ad12 // border-right-style
+ Border_Right_Width Hash = 0x1bf12 // border-right-width
+ Border_Spacing Hash = 0x1d10e // border-spacing
+ Border_Style Hash = 0x1f40c // border-style
+ Border_Top Hash = 0x2000a // border-top
+ Border_Top_Color Hash = 0x20010 // border-top-color
+ Border_Top_Style Hash = 0x21010 // border-top-style
+ Border_Top_Width Hash = 0x22010 // border-top-width
+ Border_Width Hash = 0x2300c // border-width
+ Bottom Hash = 0x8c06 // bottom
+ Burlywood Hash = 0x23c09 // burlywood
+ Cadetblue Hash = 0x25809 // cadetblue
+ Caption_Side Hash = 0x2610c // caption-side
+ Charset Hash = 0x44207 // charset
+ Chartreuse Hash = 0x2730a // chartreuse
+ Chocolate Hash = 0x27d09 // chocolate
+ Clear Hash = 0x2ab05 // clear
+ Clip Hash = 0x2b004 // clip
+ Color Hash = 0x9305 // color
+ Content Hash = 0x2e507 // content
+ Cornflowerblue Hash = 0x2ff0e // cornflowerblue
+ Cornsilk Hash = 0x30d08 // cornsilk
+ Counter_Increment Hash = 0x31511 // counter-increment
+ Counter_Reset Hash = 0x3540d // counter-reset
+ Cue Hash = 0x36103 // cue
+ Cue_After Hash = 0x36109 // cue-after
+ Cue_Before Hash = 0x36a0a // cue-before
+ Cursive Hash = 0x37b07 // cursive
+ Cursor Hash = 0x38e06 // cursor
+ Darkblue Hash = 0x7208 // darkblue
+ Darkcyan Hash = 0x7d08 // darkcyan
+ Darkgoldenrod Hash = 0x2440d // darkgoldenrod
+ Darkgray Hash = 0x25008 // darkgray
+ Darkgreen Hash = 0x79209 // darkgreen
+ Darkkhaki Hash = 0x88509 // darkkhaki
+ Darkmagenta Hash = 0x4f40b // darkmagenta
+ Darkolivegreen Hash = 0x7210e // darkolivegreen
+ Darkorange Hash = 0x7860a // darkorange
+ Darkorchid Hash = 0x87c0a // darkorchid
+ Darksalmon Hash = 0x8c00a // darksalmon
+ Darkseagreen Hash = 0x9240c // darkseagreen
+ Darkslateblue Hash = 0x3940d // darkslateblue
+ Darkslategray Hash = 0x3a10d // darkslategray
+ Darkturquoise Hash = 0x3ae0d // darkturquoise
+ Darkviolet Hash = 0x3bb0a // darkviolet
+ Deeppink Hash = 0x26b08 // deeppink
+ Deepskyblue Hash = 0x8930b // deepskyblue
+ Default Hash = 0x57b07 // default
+ Direction Hash = 0x9f109 // direction
+ Display Hash = 0x3c507 // display
+ Document Hash = 0x3d308 // document
+ Dodgerblue Hash = 0x3db0a // dodgerblue
+ Elevation Hash = 0x4a009 // elevation
+ Empty_Cells Hash = 0x4c20b // empty-cells
+ Fantasy Hash = 0x5ce07 // fantasy
+ Filter Hash = 0x59806 // filter
+ Firebrick Hash = 0x3e509 // firebrick
+ Float Hash = 0x3ee05 // float
+ Floralwhite Hash = 0x3f30b // floralwhite
+ Font Hash = 0xd804 // font
+ Font_Face Hash = 0xd809 // font-face
+ Font_Family Hash = 0x41d0b // font-family
+ Font_Size Hash = 0x42809 // font-size
+ Font_Size_Adjust Hash = 0x42810 // font-size-adjust
+ Font_Stretch Hash = 0x4380c // font-stretch
+ Font_Style Hash = 0x4490a // font-style
+ Font_Variant Hash = 0x4530c // font-variant
+ Font_Weight Hash = 0x46e0b // font-weight
+ Forestgreen Hash = 0x3700b // forestgreen
+ Fuchsia Hash = 0x47907 // fuchsia
+ Gainsboro Hash = 0x14c09 // gainsboro
+ Ghostwhite Hash = 0x1de0a // ghostwhite
+ Goldenrod Hash = 0x24809 // goldenrod
+ Greenyellow Hash = 0x7960b // greenyellow
+ Height Hash = 0x68506 // height
+ Honeydew Hash = 0x5b908 // honeydew
+ Hsl Hash = 0xf303 // hsl
+ Hsla Hash = 0xf304 // hsla
+ Ime_Mode Hash = 0x88d08 // ime-mode
+ Import Hash = 0x4e306 // import
+ Important Hash = 0x4e309 // important
+ Include_Source Hash = 0x7f20e // include-source
+ Indianred Hash = 0x4ec09 // indianred
+ Inherit Hash = 0x51907 // inherit
+ Initial Hash = 0x52007 // initial
+ Keyframes Hash = 0x40109 // keyframes
+ Lavender Hash = 0xf508 // lavender
+ Lavenderblush Hash = 0xf50d // lavenderblush
+ Lawngreen Hash = 0x4da09 // lawngreen
+ Layer_Background_Color Hash = 0x11616 // layer-background-color
+ Layer_Background_Image Hash = 0x98c16 // layer-background-image
+ Layout_Flow Hash = 0x5030b // layout-flow
+ Layout_Grid Hash = 0x53f0b // layout-grid
+ Layout_Grid_Char Hash = 0x53f10 // layout-grid-char
+ Layout_Grid_Char_Spacing Hash = 0x53f18 // layout-grid-char-spacing
+ Layout_Grid_Line Hash = 0x55710 // layout-grid-line
+ Layout_Grid_Mode Hash = 0x56d10 // layout-grid-mode
+ Layout_Grid_Type Hash = 0x58210 // layout-grid-type
+ Left Hash = 0x16304 // left
+ Lemonchiffon Hash = 0xcf0c // lemonchiffon
+ Letter_Spacing Hash = 0x5310e // letter-spacing
+ Lightblue Hash = 0x59e09 // lightblue
+ Lightcoral Hash = 0x5a70a // lightcoral
+ Lightcyan Hash = 0x5d509 // lightcyan
+ Lightgoldenrodyellow Hash = 0x5de14 // lightgoldenrodyellow
+ Lightgray Hash = 0x60509 // lightgray
+ Lightgreen Hash = 0x60e0a // lightgreen
+ Lightpink Hash = 0x61809 // lightpink
+ Lightsalmon Hash = 0x6210b // lightsalmon
+ Lightseagreen Hash = 0x62c0d // lightseagreen
+ Lightskyblue Hash = 0x6390c // lightskyblue
+ Lightslateblue Hash = 0x6450e // lightslateblue
+ Lightsteelblue Hash = 0x6530e // lightsteelblue
+ Lightyellow Hash = 0x6610b // lightyellow
+ Limegreen Hash = 0x67709 // limegreen
+ Line_Break Hash = 0x5630a // line-break
+ Line_Height Hash = 0x6800b // line-height
+ List_Style Hash = 0x68b0a // list-style
+ List_Style_Image Hash = 0x68b10 // list-style-image
+ List_Style_Position Hash = 0x69b13 // list-style-position
+ List_Style_Type Hash = 0x6ae0f // list-style-type
+ Magenta Hash = 0x4f807 // magenta
+ Margin Hash = 0x2c006 // margin
+ Margin_Bottom Hash = 0x2c00d // margin-bottom
+ Margin_Left Hash = 0x2cc0b // margin-left
+ Margin_Right Hash = 0x3320c // margin-right
+ Margin_Top Hash = 0x7cd0a // margin-top
+ Marker_Offset Hash = 0x6bd0d // marker-offset
+ Marks Hash = 0x6ca05 // marks
+ Max_Height Hash = 0x6e90a // max-height
+ Max_Width Hash = 0x6f309 // max-width
+ Media Hash = 0xa1405 // media
+ Mediumaquamarine Hash = 0x6fc10 // mediumaquamarine
+ Mediumblue Hash = 0x70c0a // mediumblue
+ Mediumorchid Hash = 0x7160c // mediumorchid
+ Mediumpurple Hash = 0x72f0c // mediumpurple
+ Mediumseagreen Hash = 0x73b0e // mediumseagreen
+ Mediumslateblue Hash = 0x7490f // mediumslateblue
+ Mediumspringgreen Hash = 0x75811 // mediumspringgreen
+ Mediumturquoise Hash = 0x7690f // mediumturquoise
+ Mediumvioletred Hash = 0x7780f // mediumvioletred
+ Midnightblue Hash = 0x7a60c // midnightblue
+ Min_Height Hash = 0x7b20a // min-height
+ Min_Width Hash = 0x7bc09 // min-width
+ Mintcream Hash = 0x7c509 // mintcream
+ Mistyrose Hash = 0x7e309 // mistyrose
+ Moccasin Hash = 0x7ec08 // moccasin
+ Monospace Hash = 0x8c709 // monospace
+ Namespace Hash = 0x49809 // namespace
+ Navajowhite Hash = 0x4a80b // navajowhite
+ None Hash = 0x4bf04 // none
+ Normal Hash = 0x4d506 // normal
+ Olivedrab Hash = 0x80009 // olivedrab
+ Orangered Hash = 0x78a09 // orangered
+ Orphans Hash = 0x48807 // orphans
+ Outline Hash = 0x81d07 // outline
+ Outline_Color Hash = 0x81d0d // outline-color
+ Outline_Style Hash = 0x82a0d // outline-style
+ Outline_Width Hash = 0x8370d // outline-width
+ Overflow Hash = 0x2db08 // overflow
+ Overflow_X Hash = 0x2db0a // overflow-x
+ Overflow_Y Hash = 0x8440a // overflow-y
+ Padding Hash = 0x2b307 // padding
+ Padding_Bottom Hash = 0x2b30e // padding-bottom
+ Padding_Left Hash = 0x5f90c // padding-left
+ Padding_Right Hash = 0x7d60d // padding-right
+ Padding_Top Hash = 0x8d90b // padding-top
+ Page Hash = 0x84e04 // page
+ Page_Break_After Hash = 0x8e310 // page-break-after
+ Page_Break_Before Hash = 0x84e11 // page-break-before
+ Page_Break_Inside Hash = 0x85f11 // page-break-inside
+ Palegoldenrod Hash = 0x8700d // palegoldenrod
+ Palegreen Hash = 0x89e09 // palegreen
+ Paleturquoise Hash = 0x8a70d // paleturquoise
+ Palevioletred Hash = 0x8b40d // palevioletred
+ Papayawhip Hash = 0x8d00a // papayawhip
+ Pause Hash = 0x8f305 // pause
+ Pause_After Hash = 0x8f30b // pause-after
+ Pause_Before Hash = 0x8fe0c // pause-before
+ Peachpuff Hash = 0x59009 // peachpuff
+ Pitch Hash = 0x90a05 // pitch
+ Pitch_Range Hash = 0x90a0b // pitch-range
+ Play_During Hash = 0x3c80b // play-during
+ Position Hash = 0xb08 // position
+ Powderblue Hash = 0x9150a // powderblue
+ Progid Hash = 0x91f06 // progid
+ Quotes Hash = 0x93006 // quotes
+ Rgb Hash = 0x3803 // rgb
+ Rgba Hash = 0x3804 // rgba
+ Richness Hash = 0x9708 // richness
+ Right Hash = 0x1a205 // right
+ Rosybrown Hash = 0x15309 // rosybrown
+ Royalblue Hash = 0xb509 // royalblue
+ Ruby_Align Hash = 0x12b0a // ruby-align
+ Ruby_Overhang Hash = 0x1400d // ruby-overhang
+ Ruby_Position Hash = 0x16c0d // ruby-position
+ Saddlebrown Hash = 0x48e0b // saddlebrown
+ Sandybrown Hash = 0x4cc0a // sandybrown
+ Sans_Serif Hash = 0x5c50a // sans-serif
+ Scrollbar_3d_Light_Color Hash = 0x9e18 // scrollbar-3d-light-color
+ Scrollbar_Arrow_Color Hash = 0x29615 // scrollbar-arrow-color
+ Scrollbar_Base_Color Hash = 0x40914 // scrollbar-base-color
+ Scrollbar_Dark_Shadow_Color Hash = 0x6ce1b // scrollbar-dark-shadow-color
+ Scrollbar_Face_Color Hash = 0x93514 // scrollbar-face-color
+ Scrollbar_Highlight_Color Hash = 0x9ce19 // scrollbar-highlight-color
+ Scrollbar_Shadow_Color Hash = 0x94916 // scrollbar-shadow-color
+ Scrollbar_Track_Color Hash = 0x95f15 // scrollbar-track-color
+ Seagreen Hash = 0x63108 // seagreen
+ Seashell Hash = 0x10f08 // seashell
+ Serif Hash = 0x5ca05 // serif
+ Size Hash = 0x42d04 // size
+ Slateblue Hash = 0x39809 // slateblue
+ Slategray Hash = 0x3a509 // slategray
+ Speak Hash = 0x97405 // speak
+ Speak_Header Hash = 0x9740c // speak-header
+ Speak_Numeral Hash = 0x9800d // speak-numeral
+ Speak_Punctuation Hash = 0x9a211 // speak-punctuation
+ Speech_Rate Hash = 0x9b30b // speech-rate
+ Springgreen Hash = 0x75e0b // springgreen
+ Steelblue Hash = 0x65809 // steelblue
+ Stress Hash = 0x29106 // stress
+ Supports Hash = 0x9c708 // supports
+ Table_Layout Hash = 0x4fd0c // table-layout
+ Text_Align Hash = 0x2840a // text-align
+ Text_Align_Last Hash = 0x2840f // text-align-last
+ Text_Autospace Hash = 0x1e60e // text-autospace
+ Text_Decoration Hash = 0x4b10f // text-decoration
+ Text_Indent Hash = 0x9bc0b // text-indent
+ Text_Justify Hash = 0x250c // text-justify
+ Text_Kashida_Space Hash = 0x4e12 // text-kashida-space
+ Text_Overflow Hash = 0x2d60d // text-overflow
+ Text_Shadow Hash = 0x2eb0b // text-shadow
+ Text_Transform Hash = 0x3250e // text-transform
+ Text_Underline_Position Hash = 0x33d17 // text-underline-position
+ Top Hash = 0x20703 // top
+ Turquoise Hash = 0x3b209 // turquoise
+ Unicode_Bidi Hash = 0x9e70c // unicode-bidi
+ Vertical_Align Hash = 0x3800e // vertical-align
+ Visibility Hash = 0x9fa0a // visibility
+ Voice_Family Hash = 0xa040c // voice-family
+ Volume Hash = 0xa1006 // volume
+ White Hash = 0x1e305 // white
+ White_Space Hash = 0x4630b // white-space
+ Whitesmoke Hash = 0x3f90a // whitesmoke
+ Widows Hash = 0x5c006 // widows
+ Width Hash = 0xef05 // width
+ Word_Break Hash = 0x2f50a // word-break
+ Word_Spacing Hash = 0x50d0c // word-spacing
+ Word_Wrap Hash = 0x5f109 // word-wrap
+ Writing_Mode Hash = 0x66b0c // writing-mode
+ Yellow Hash = 0x5ec06 // yellow
+ Yellowgreen Hash = 0x79b0b // yellowgreen
+ Z_Index Hash = 0xa1907 // z-index
+)
+
+// String returns the hash's name.
+func (i Hash) String() string {
+ start := uint32(i >> 8)
+ n := uint32(i & 0xff)
+ if start+n > uint32(len(_Hash_text)) {
+ return ""
+ }
+ return _Hash_text[start : start+n]
+}
+
+// ToHash returns the hash whose name is s. It returns zero if there is no
+// such hash. It is case sensitive.
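+//
+// For example, ToHash([]byte("margin-left")) returns Margin_Left, while an
+// unrecognized name such as []byte("foo") returns 0.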
+func ToHash(s []byte) Hash {
+ if len(s) == 0 || len(s) > _Hash_maxLen {
+ return 0
+ }
+ h := uint32(_Hash_hash0)
+ for i := 0; i < len(s); i++ {
+ h ^= uint32(s[i])
+ h *= 16777619
+ }
+ if i := _Hash_table[h&uint32(len(_Hash_table)-1)]; int(i&0xff) == len(s) {
+ t := _Hash_text[i>>8 : i>>8+i&0xff]
+ for i := 0; i < len(s); i++ {
+ if t[i] != s[i] {
+ goto NEXT
+ }
+ }
+ return i
+ }
+NEXT:
+ if i := _Hash_table[(h>>16)&uint32(len(_Hash_table)-1)]; int(i&0xff) == len(s) {
+ t := _Hash_text[i>>8 : i>>8+i&0xff]
+ for i := 0; i < len(s); i++ {
+ if t[i] != s[i] {
+ return 0
+ }
+ }
+ return i
+ }
+ return 0
+}
+
+const _Hash_hash0 = 0x700e0976
+const _Hash_maxLen = 27
+const _Hash_text = "background-position-ybackground-repeatext-justifybehaviorgba" +
+ "ckground-attachmentext-kashida-spaceblackblanchedalmondarkbl" +
+ "ueboldarkcyanborder-bottom-colorichnesscrollbar-3d-light-col" +
+ "oroyalblueborder-bottom-stylemonchiffont-faceborder-bottom-w" +
+ "idthslavenderblushborder-collapseashellayer-background-color" +
+ "uby-alignborder-coloruby-overhangainsborosybrownborder-left-" +
+ "coloruby-positionborder-left-styleborder-left-widthborder-ri" +
+ "ght-colorborder-right-styleborder-right-widthborder-spacingh" +
+ "ostwhitext-autospaceborder-styleborder-top-colorborder-top-s" +
+ "tyleborder-top-widthborder-widthburlywoodarkgoldenrodarkgray" +
+ "cadetbluecaption-sideeppinkchartreusechocolatext-align-lastr" +
+ "esscrollbar-arrow-colorclearclipadding-bottomargin-bottomarg" +
+ "in-leftext-overflow-xcontentext-shadoword-breakcornflowerblu" +
+ "ecornsilkcounter-incrementext-transformargin-rightext-underl" +
+ "ine-positioncounter-resetcue-aftercue-beforestgreencursivert" +
+ "ical-aligncursordarkslatebluedarkslategraydarkturquoisedarkv" +
+ "ioletdisplay-duringdocumentdodgerbluefirebrickfloatfloralwhi" +
+ "tesmokeyframescrollbar-base-colorfont-familyfont-size-adjust" +
+ "font-stretcharsetfont-stylefont-variantiquewhite-spacefont-w" +
+ "eightfuchsiacceleratorphansaddlebrownamespacelevationavajowh" +
+ "itext-decorationonempty-cellsandybrownormalawngreenimportant" +
+ "indianredarkmagentable-layout-floword-spacinginheritinitiali" +
+ "cebluevioletter-spacinglayout-grid-char-spacinglayout-grid-l" +
+ "ine-breaklayout-grid-modefaultlayout-grid-typeachpuffilterli" +
+ "ghtbluelightcoralphazimuthoneydewidowsans-serifantasylightcy" +
+ "anlightgoldenrodyelloword-wrapadding-leftlightgraylightgreen" +
+ "lightpinklightsalmonlightseagreenlightskybluelightslatebluel" +
+ "ightsteelbluelightyellowriting-modelimegreenline-heightlist-" +
+ "style-imagelist-style-positionlist-style-typemarker-offsetma" +
+ "rkscrollbar-dark-shadow-colormax-heightmax-widthmediumaquama" +
+ "rinemediumbluemediumorchidarkolivegreenmediumpurplemediumsea" +
+ "greenmediumslatebluemediumspringgreenmediumturquoisemediumvi" +
+ "oletredarkorangeredarkgreenyellowgreenmidnightbluemin-height" +
+ "min-widthmintcreamargin-topadding-rightmistyrosemoccasinclud" +
+ "e-sourceolivedrabackground-position-xoutline-coloroutline-st" +
+ "yleoutline-widthoverflow-ypage-break-beforepage-break-inside" +
+ "palegoldenrodarkorchidarkkhakime-modeepskybluepalegreenpalet" +
+ "urquoisepalevioletredarksalmonospacepapayawhipadding-topage-" +
+ "break-afterpause-afterpause-beforepitch-rangepowderblueprogi" +
+ "darkseagreenquotescrollbar-face-colorscrollbar-shadow-colors" +
+ "crollbar-track-colorspeak-headerspeak-numeralayer-background" +
+ "-imagespeak-punctuationspeech-ratext-indentsupportscrollbar-" +
+ "highlight-colorunicode-bidirectionvisibilityvoice-familyvolu" +
+ "mediaz-index"
+
+var _Hash_table = [1 << 9]Hash{
+ 0x0: 0x4cc0a, // sandybrown
+ 0x1: 0x20703, // top
+ 0x4: 0xb509, // royalblue
+ 0x6: 0x4b10f, // text-decoration
+ 0xb: 0x5030b, // layout-flow
+ 0xc: 0x11c10, // background-color
+ 0xd: 0x8c06, // bottom
+ 0x10: 0x62c0d, // lightseagreen
+ 0x11: 0x8930b, // deepskyblue
+ 0x12: 0x39809, // slateblue
+ 0x13: 0x4c20b, // empty-cells
+ 0x14: 0x2b004, // clip
+ 0x15: 0x70c0a, // mediumblue
+ 0x16: 0x49809, // namespace
+ 0x18: 0x2c00d, // margin-bottom
+ 0x1a: 0x1350c, // border-color
+ 0x1b: 0x5b908, // honeydew
+ 0x1d: 0x2300c, // border-width
+ 0x1e: 0x9740c, // speak-header
+ 0x1f: 0x8b40d, // palevioletred
+ 0x20: 0x1d10e, // border-spacing
+ 0x22: 0x2b307, // padding
+ 0x23: 0x3320c, // margin-right
+ 0x27: 0x7bc09, // min-width
+ 0x29: 0x60509, // lightgray
+ 0x2a: 0x6610b, // lightyellow
+ 0x2c: 0x8e310, // page-break-after
+ 0x2d: 0x2e507, // content
+ 0x30: 0x250c, // text-justify
+ 0x32: 0x2840f, // text-align-last
+ 0x34: 0x93514, // scrollbar-face-color
+ 0x35: 0x40109, // keyframes
+ 0x37: 0x4f807, // magenta
+ 0x38: 0x3a509, // slategray
+ 0x3a: 0x99210, // background-image
+ 0x3c: 0x7f20e, // include-source
+ 0x3d: 0x65809, // steelblue
+ 0x3e: 0x81d0d, // outline-color
+ 0x40: 0x1020f, // border-collapse
+ 0x41: 0xf508, // lavender
+ 0x42: 0x9c708, // supports
+ 0x44: 0x6800b, // line-height
+ 0x45: 0x9a211, // speak-punctuation
+ 0x46: 0x9fa0a, // visibility
+ 0x47: 0x2ab05, // clear
+ 0x4b: 0x52a0a, // blueviolet
+ 0x4e: 0x57b07, // default
+ 0x50: 0x6bd0d, // marker-offset
+ 0x52: 0x31511, // counter-increment
+ 0x53: 0x6450e, // lightslateblue
+ 0x54: 0x10f08, // seashell
+ 0x56: 0x16c0d, // ruby-position
+ 0x57: 0x82a0d, // outline-style
+ 0x58: 0x63108, // seagreen
+ 0x59: 0x9305, // color
+ 0x5c: 0x2610c, // caption-side
+ 0x5d: 0x68506, // height
+ 0x5e: 0x7490f, // mediumslateblue
+ 0x5f: 0x8fe0c, // pause-before
+ 0x60: 0xcf0c, // lemonchiffon
+ 0x63: 0x37b07, // cursive
+ 0x66: 0x4a80b, // navajowhite
+ 0x67: 0xa040c, // voice-family
+ 0x68: 0x2440d, // darkgoldenrod
+ 0x69: 0x3e509, // firebrick
+ 0x6a: 0x4490a, // font-style
+ 0x6b: 0x9f109, // direction
+ 0x6d: 0x7860a, // darkorange
+ 0x6f: 0x4530c, // font-variant
+ 0x70: 0x2c006, // margin
+ 0x71: 0x84e11, // page-break-before
+ 0x73: 0x2d60d, // text-overflow
+ 0x74: 0x4e12, // text-kashida-space
+ 0x75: 0x30d08, // cornsilk
+ 0x76: 0x46e0b, // font-weight
+ 0x77: 0x42d04, // size
+ 0x78: 0x53f0b, // layout-grid
+ 0x79: 0x8d90b, // padding-top
+ 0x7a: 0x44207, // charset
+ 0x7d: 0x7e309, // mistyrose
+ 0x7e: 0x5b307, // azimuth
+ 0x7f: 0x8f30b, // pause-after
+ 0x84: 0x38e06, // cursor
+ 0x85: 0xf303, // hsl
+ 0x86: 0x5310e, // letter-spacing
+ 0x8b: 0x3d308, // document
+ 0x8d: 0x36109, // cue-after
+ 0x8f: 0x36a0a, // cue-before
+ 0x91: 0x5ce07, // fantasy
+ 0x94: 0x1400d, // ruby-overhang
+ 0x95: 0x2b30e, // padding-bottom
+ 0x9a: 0x59e09, // lightblue
+ 0x9c: 0x8c00a, // darksalmon
+ 0x9d: 0x42810, // font-size-adjust
+ 0x9e: 0x61809, // lightpink
+ 0xa0: 0x9240c, // darkseagreen
+ 0xa2: 0x85f11, // page-break-inside
+ 0xa4: 0x24809, // goldenrod
+ 0xa6: 0xa1405, // media
+ 0xa7: 0x53f18, // layout-grid-char-spacing
+ 0xa9: 0x4e309, // important
+ 0xaa: 0x7b20a, // min-height
+ 0xb0: 0x15c11, // border-left-color
+ 0xb1: 0x84e04, // page
+ 0xb2: 0x98c16, // layer-background-image
+ 0xb5: 0x55710, // layout-grid-line
+ 0xb6: 0x1511, // background-repeat
+ 0xb7: 0x8513, // border-bottom-color
+ 0xb9: 0x25008, // darkgray
+ 0xbb: 0x5f90c, // padding-left
+ 0xbc: 0x1a205, // right
+ 0xc0: 0x40914, // scrollbar-base-color
+ 0xc1: 0x6530e, // lightsteelblue
+ 0xc2: 0xef05, // width
+ 0xc5: 0x3b209, // turquoise
+ 0xc8: 0x3ee05, // float
+ 0xca: 0x12b0a, // ruby-align
+ 0xcb: 0xb08, // position
+ 0xcc: 0x7cd0a, // margin-top
+ 0xce: 0x2cc0b, // margin-left
+ 0xcf: 0x2eb0b, // text-shadow
+ 0xd0: 0x2f50a, // word-break
+ 0xd4: 0x3f90a, // whitesmoke
+ 0xd6: 0x33d17, // text-underline-position
+ 0xd7: 0x1bf12, // border-right-width
+ 0xd8: 0x80009, // olivedrab
+ 0xd9: 0x89e09, // palegreen
+ 0xdb: 0x4e306, // import
+ 0xdc: 0x6ca05, // marks
+ 0xdd: 0x3bb0a, // darkviolet
+ 0xde: 0x13, // background-position
+ 0xe0: 0x6fc10, // mediumaquamarine
+ 0xe1: 0x7a04, // bold
+ 0xe2: 0x7690f, // mediumturquoise
+ 0xe4: 0x8700d, // palegoldenrod
+ 0xe5: 0x4f40b, // darkmagenta
+ 0xe6: 0x15309, // rosybrown
+ 0xe7: 0x18a11, // border-left-width
+ 0xe8: 0x88509, // darkkhaki
+ 0xea: 0x650e, // blanchedalmond
+ 0xeb: 0x52007, // initial
+ 0xec: 0x6ce1b, // scrollbar-dark-shadow-color
+ 0xee: 0x48e0b, // saddlebrown
+ 0xef: 0x8a70d, // paleturquoise
+ 0xf1: 0x19b12, // border-right-color
+ 0xf3: 0x1e305, // white
+ 0xf7: 0x9ce19, // scrollbar-highlight-color
+ 0xf9: 0x56d10, // layout-grid-mode
+ 0xfc: 0x1f40c, // border-style
+ 0xfe: 0x69b13, // list-style-position
+ 0x100: 0x11616, // layer-background-color
+ 0x102: 0x58210, // layout-grid-type
+ 0x103: 0x15c0b, // border-left
+ 0x104: 0x2db08, // overflow
+ 0x105: 0x7a60c, // midnightblue
+ 0x10b: 0x2840a, // text-align
+ 0x10e: 0x21010, // border-top-style
+ 0x110: 0x5de14, // lightgoldenrodyellow
+ 0x114: 0x8506, // border
+ 0x119: 0xd804, // font
+ 0x11c: 0x7020a, // aquamarine
+ 0x11d: 0x60e0a, // lightgreen
+ 0x11e: 0x5ec06, // yellow
+ 0x120: 0x97405, // speak
+ 0x121: 0x4630b, // white-space
+ 0x123: 0x3940d, // darkslateblue
+ 0x125: 0x1e60e, // text-autospace
+ 0x128: 0xf50d, // lavenderblush
+ 0x12c: 0x6210b, // lightsalmon
+ 0x12d: 0x51907, // inherit
+ 0x131: 0x87c0a, // darkorchid
+ 0x132: 0x2000a, // border-top
+ 0x133: 0x3c80b, // play-during
+ 0x137: 0x22010, // border-top-width
+ 0x139: 0x48807, // orphans
+ 0x13a: 0x41d0b, // font-family
+ 0x13d: 0x3db0a, // dodgerblue
+ 0x13f: 0x8d00a, // papayawhip
+ 0x140: 0x8f305, // pause
+ 0x143: 0x2ff0e, // cornflowerblue
+ 0x144: 0x3c507, // display
+ 0x146: 0x52509, // aliceblue
+ 0x14a: 0x7208, // darkblue
+ 0x14b: 0x3108, // behavior
+ 0x14c: 0x3540d, // counter-reset
+ 0x14d: 0x7960b, // greenyellow
+ 0x14e: 0x75811, // mediumspringgreen
+ 0x14f: 0x9150a, // powderblue
+ 0x150: 0x53f10, // layout-grid-char
+ 0x158: 0x81d07, // outline
+ 0x159: 0x23c09, // burlywood
+ 0x15b: 0xe113, // border-bottom-width
+ 0x15c: 0x4bf04, // none
+ 0x15e: 0x36103, // cue
+ 0x15f: 0x4fd0c, // table-layout
+ 0x160: 0x90a0b, // pitch-range
+ 0x161: 0xa1907, // z-index
+ 0x162: 0x29106, // stress
+ 0x163: 0x80815, // background-position-x
+ 0x165: 0x4d506, // normal
+ 0x167: 0x72f0c, // mediumpurple
+ 0x169: 0x5a70a, // lightcoral
+ 0x16c: 0x6e90a, // max-height
+ 0x16d: 0x3804, // rgba
+ 0x16e: 0x68b10, // list-style-image
+ 0x170: 0x26b08, // deeppink
+ 0x173: 0x91f06, // progid
+ 0x175: 0x75e0b, // springgreen
+ 0x176: 0x3700b, // forestgreen
+ 0x179: 0x7ec08, // moccasin
+ 0x17a: 0x7780f, // mediumvioletred
+ 0x17e: 0x9bc0b, // text-indent
+ 0x181: 0x6ae0f, // list-style-type
+ 0x182: 0x14c09, // gainsboro
+ 0x183: 0x3ae0d, // darkturquoise
+ 0x184: 0x3a10d, // darkslategray
+ 0x189: 0x2db0a, // overflow-x
+ 0x18b: 0x93006, // quotes
+ 0x18c: 0x3a15, // background-attachment
+ 0x18f: 0x19b0c, // border-right
+ 0x191: 0x6005, // black
+ 0x192: 0x79b0b, // yellowgreen
+ 0x194: 0x59009, // peachpuff
+ 0x197: 0x3f30b, // floralwhite
+ 0x19c: 0x7210e, // darkolivegreen
+ 0x19d: 0x5f109, // word-wrap
+ 0x19e: 0x17911, // border-left-style
+ 0x1a0: 0x9b30b, // speech-rate
+ 0x1a1: 0x8370d, // outline-width
+ 0x1a2: 0x9e70c, // unicode-bidi
+ 0x1a3: 0x68b0a, // list-style
+ 0x1a4: 0x90a05, // pitch
+ 0x1a5: 0x95f15, // scrollbar-track-color
+ 0x1a6: 0x47907, // fuchsia
+ 0x1a8: 0x3800e, // vertical-align
+ 0x1ad: 0x5af05, // alpha
+ 0x1ae: 0x6f309, // max-width
+ 0x1af: 0x9708, // richness
+ 0x1b0: 0x3803, // rgb
+ 0x1b1: 0x7d60d, // padding-right
+ 0x1b2: 0x29615, // scrollbar-arrow-color
+ 0x1b3: 0x16304, // left
+ 0x1b5: 0x4a009, // elevation
+ 0x1b6: 0x5630a, // line-break
+ 0x1ba: 0x27d09, // chocolate
+ 0x1bb: 0x9800d, // speak-numeral
+ 0x1bd: 0x47f0b, // accelerator
+ 0x1be: 0x67709, // limegreen
+ 0x1c1: 0x7d08, // darkcyan
+ 0x1c3: 0x6390c, // lightskyblue
+ 0x1c5: 0x5c50a, // sans-serif
+ 0x1c6: 0x850d, // border-bottom
+ 0x1c7: 0xa, // background
+ 0x1c8: 0xa1006, // volume
+ 0x1ca: 0x66b0c, // writing-mode
+ 0x1cb: 0x9e18, // scrollbar-3d-light-color
+ 0x1cc: 0x5c006, // widows
+ 0x1cf: 0x42809, // font-size
+ 0x1d0: 0x15, // background-position-y
+ 0x1d1: 0x5d509, // lightcyan
+ 0x1d4: 0x4ec09, // indianred
+ 0x1d7: 0x1de0a, // ghostwhite
+ 0x1db: 0x78a09, // orangered
+ 0x1dc: 0x45c0c, // antiquewhite
+ 0x1dd: 0x4da09, // lawngreen
+ 0x1df: 0x73b0e, // mediumseagreen
+ 0x1e0: 0x20010, // border-top-color
+ 0x1e2: 0xf304, // hsla
+ 0x1e4: 0x3250e, // text-transform
+ 0x1e6: 0x7160c, // mediumorchid
+ 0x1e9: 0x8c709, // monospace
+ 0x1ec: 0x94916, // scrollbar-shadow-color
+ 0x1ed: 0x79209, // darkgreen
+ 0x1ef: 0x25809, // cadetblue
+ 0x1f0: 0x59806, // filter
+ 0x1f1: 0x1ad12, // border-right-style
+ 0x1f6: 0x8440a, // overflow-y
+ 0x1f7: 0xd809, // font-face
+ 0x1f8: 0x50d0c, // word-spacing
+ 0x1fa: 0xbe13, // border-bottom-style
+ 0x1fb: 0x4380c, // font-stretch
+ 0x1fc: 0x7c509, // mintcream
+ 0x1fd: 0x88d08, // ime-mode
+ 0x1fe: 0x2730a, // chartreuse
+ 0x1ff: 0x5ca05, // serif
+}
diff --git a/vendor/github.com/tdewolff/parse/css/hash_test.go b/vendor/github.com/tdewolff/parse/css/hash_test.go
new file mode 100644
index 0000000..e176cc1
--- /dev/null
+++ b/vendor/github.com/tdewolff/parse/css/hash_test.go
@@ -0,0 +1,16 @@
+package css // import "github.com/tdewolff/parse/css"
+
+import (
+ "testing"
+
+ "github.com/tdewolff/test"
+)
+
+func TestHashTable(t *testing.T) {
+ test.T(t, ToHash([]byte("font")), Font, "'font' must resolve to hash.Font")
+ test.T(t, Font.String(), "font")
+ test.T(t, Margin_Left.String(), "margin-left")
+ test.T(t, ToHash([]byte("")), Hash(0), "empty string must resolve to zero")
+ test.T(t, Hash(0xffffff).String(), "")
+ test.T(t, ToHash([]byte("fonts")), Hash(0), "'fonts' must resolve to zero")
+}
diff --git a/vendor/github.com/tdewolff/parse/css/lex.go b/vendor/github.com/tdewolff/parse/css/lex.go
new file mode 100644
index 0000000..3924bb7
--- /dev/null
+++ b/vendor/github.com/tdewolff/parse/css/lex.go
@@ -0,0 +1,710 @@
+// Package css is a CSS3 lexer and parser following the specifications at http://www.w3.org/TR/css-syntax-3/.
+package css // import "github.com/tdewolff/parse/css"
+
+// TODO: use the \uFFFD replacement character for NULL bytes in strings, for example, or at least don't end the string early
+
+import (
+ "bytes"
+ "io"
+ "strconv"
+
+ "github.com/tdewolff/parse"
+ "github.com/tdewolff/parse/buffer"
+)
+
+// TokenType determines the type of token, e.g. a number or a semicolon.
+type TokenType uint32
+
+// TokenType values.
+const (
+ ErrorToken TokenType = iota // extra token when errors occur
+ IdentToken
+ FunctionToken // rgb( rgba( ...
+ AtKeywordToken // @abc
+ HashToken // #abc
+ StringToken
+ BadStringToken
+ URLToken
+ BadURLToken
+ DelimToken // any unmatched character
+ NumberToken // 5
+ PercentageToken // 5%
+ DimensionToken // 5em
+ UnicodeRangeToken // U+554A
+ IncludeMatchToken // ~=
+ DashMatchToken // |=
+ PrefixMatchToken // ^=
+ SuffixMatchToken // $=
+ SubstringMatchToken // *=
+ ColumnToken // ||
+ WhitespaceToken // space \t \r \n \f
+ CDOToken // <!--
+ CDCToken // -->
+ ColonToken // :
+ SemicolonToken // ;
+ CommaToken // ,
+ LeftBracketToken // [
+ RightBracketToken // ]
+ LeftParenthesisToken // (
+ RightParenthesisToken // )
+ LeftBraceToken // {
+ RightBraceToken // }
+ CommentToken // extra token for comments
+ EmptyToken
+ CustomPropertyNameToken
+ CustomPropertyValueToken
+)
+
+// String returns the string representation of a TokenType.
+func (tt TokenType) String() string {
+ switch tt {
+ case ErrorToken:
+ return "Error"
+ case IdentToken:
+ return "Ident"
+ case FunctionToken:
+ return "Function"
+ case AtKeywordToken:
+ return "AtKeyword"
+ case HashToken:
+ return "Hash"
+ case StringToken:
+ return "String"
+ case BadStringToken:
+ return "BadString"
+ case URLToken:
+ return "URL"
+ case BadURLToken:
+ return "BadURL"
+ case DelimToken:
+ return "Delim"
+ case NumberToken:
+ return "Number"
+ case PercentageToken:
+ return "Percentage"
+ case DimensionToken:
+ return "Dimension"
+ case UnicodeRangeToken:
+ return "UnicodeRange"
+ case IncludeMatchToken:
+ return "IncludeMatch"
+ case DashMatchToken:
+ return "DashMatch"
+ case PrefixMatchToken:
+ return "PrefixMatch"
+ case SuffixMatchToken:
+ return "SuffixMatch"
+ case SubstringMatchToken:
+ return "SubstringMatch"
+ case ColumnToken:
+ return "Column"
+ case WhitespaceToken:
+ return "Whitespace"
+ case CDOToken:
+ return "CDO"
+ case CDCToken:
+ return "CDC"
+ case ColonToken:
+ return "Colon"
+ case SemicolonToken:
+ return "Semicolon"
+ case CommaToken:
+ return "Comma"
+ case LeftBracketToken:
+ return "LeftBracket"
+ case RightBracketToken:
+ return "RightBracket"
+ case LeftParenthesisToken:
+ return "LeftParenthesis"
+ case RightParenthesisToken:
+ return "RightParenthesis"
+ case LeftBraceToken:
+ return "LeftBrace"
+ case RightBraceToken:
+ return "RightBrace"
+ case CommentToken:
+ return "Comment"
+ case EmptyToken:
+ return "Empty"
+ case CustomPropertyNameToken:
+ return "CustomPropertyName"
+ case CustomPropertyValueToken:
+ return "CustomPropertyValue"
+ }
+ return "Invalid(" + strconv.Itoa(int(tt)) + ")"
+}
+
+////////////////////////////////////////////////////////////////
+
+// Lexer is the state for the lexer.
+type Lexer struct {
+ r *buffer.Lexer
+}
+
+// NewLexer returns a new Lexer for a given io.Reader.
+func NewLexer(r io.Reader) *Lexer {
+ return &Lexer{
+ buffer.NewLexer(r),
+ }
+}
+
+// Err returns the error encountered during lexing; this is often io.EOF, but other errors can be returned as well.
+func (l *Lexer) Err() error {
+ return l.r.Err()
+}
+
+// Restore restores the NULL byte at the end of the buffer.
+func (l *Lexer) Restore() {
+ l.r.Restore()
+}
+
+// Next returns the next Token. It returns ErrorToken when an error was encountered. Using Err() one can retrieve the error message.
+func (l *Lexer) Next() (TokenType, []byte) {
+ switch l.r.Peek(0) {
+ case ' ', '\t', '\n', '\r', '\f':
+ l.r.Move(1)
+ for l.consumeWhitespace() {
+ }
+ return WhitespaceToken, l.r.Shift()
+ case ':':
+ l.r.Move(1)
+ return ColonToken, l.r.Shift()
+ case ';':
+ l.r.Move(1)
+ return SemicolonToken, l.r.Shift()
+ case ',':
+ l.r.Move(1)
+ return CommaToken, l.r.Shift()
+ case '(', ')', '[', ']', '{', '}':
+ if t := l.consumeBracket(); t != ErrorToken {
+ return t, l.r.Shift()
+ }
+ case '#':
+ if l.consumeHashToken() {
+ return HashToken, l.r.Shift()
+ }
+ case '"', '\'':
+ if t := l.consumeString(); t != ErrorToken {
+ return t, l.r.Shift()
+ }
+ case '.', '+':
+ if t := l.consumeNumeric(); t != ErrorToken {
+ return t, l.r.Shift()
+ }
+ case '-':
+ if t := l.consumeNumeric(); t != ErrorToken {
+ return t, l.r.Shift()
+ } else if t := l.consumeIdentlike(); t != ErrorToken {
+ return t, l.r.Shift()
+ } else if l.consumeCDCToken() {
+ return CDCToken, l.r.Shift()
+ } else if l.consumeCustomVariableToken() {
+ return CustomPropertyNameToken, l.r.Shift()
+ }
+ case '@':
+ if l.consumeAtKeywordToken() {
+ return AtKeywordToken, l.r.Shift()
+ }
+ case '$', '*', '^', '~':
+ if t := l.consumeMatch(); t != ErrorToken {
+ return t, l.r.Shift()
+ }
+ case '/':
+ if l.consumeComment() {
+ return CommentToken, l.r.Shift()
+ }
+ case '<':
+ if l.consumeCDOToken() {
+ return CDOToken, l.r.Shift()
+ }
+ case '\\':
+ if t := l.consumeIdentlike(); t != ErrorToken {
+ return t, l.r.Shift()
+ }
+ case 'u', 'U':
+ if l.consumeUnicodeRangeToken() {
+ return UnicodeRangeToken, l.r.Shift()
+ } else if t := l.consumeIdentlike(); t != ErrorToken {
+ return t, l.r.Shift()
+ }
+ case '|':
+ if t := l.consumeMatch(); t != ErrorToken {
+ return t, l.r.Shift()
+ } else if l.consumeColumnToken() {
+ return ColumnToken, l.r.Shift()
+ }
+ case 0:
+ if l.Err() != nil {
+ return ErrorToken, nil
+ }
+ default:
+ if t := l.consumeNumeric(); t != ErrorToken {
+ return t, l.r.Shift()
+ } else if t := l.consumeIdentlike(); t != ErrorToken {
+ return t, l.r.Shift()
+ }
+ }
+ // cannot be a multi-byte rune, because consumeIdentlike above would have consumed it as an identifier
+ l.r.Move(1)
+ return DelimToken, l.r.Shift()
+}
+
+////////////////////////////////////////////////////////////////
+
+/*
+The following functions follow the railroad diagrams in http://www.w3.org/TR/css3-syntax/
+*/
+
+func (l *Lexer) consumeByte(c byte) bool {
+ if l.r.Peek(0) == c {
+ l.r.Move(1)
+ return true
+ }
+ return false
+}
+
+func (l *Lexer) consumeComment() bool {
+ if l.r.Peek(0) != '/' || l.r.Peek(1) != '*' {
+ return false
+ }
+ l.r.Move(2)
+ for {
+ c := l.r.Peek(0)
+ if c == 0 && l.Err() != nil {
+ break
+ } else if c == '*' && l.r.Peek(1) == '/' {
+ l.r.Move(2)
+ return true
+ }
+ l.r.Move(1)
+ }
+ return true
+}
+
+func (l *Lexer) consumeNewline() bool {
+ c := l.r.Peek(0)
+ if c == '\n' || c == '\f' {
+ l.r.Move(1)
+ return true
+ } else if c == '\r' {
+ if l.r.Peek(1) == '\n' {
+ l.r.Move(2)
+ } else {
+ l.r.Move(1)
+ }
+ return true
+ }
+ return false
+}
+
+func (l *Lexer) consumeWhitespace() bool {
+ c := l.r.Peek(0)
+ if c == ' ' || c == '\t' || c == '\n' || c == '\r' || c == '\f' {
+ l.r.Move(1)
+ return true
+ }
+ return false
+}
+
+func (l *Lexer) consumeDigit() bool {
+ c := l.r.Peek(0)
+ if c >= '0' && c <= '9' {
+ l.r.Move(1)
+ return true
+ }
+ return false
+}
+
+func (l *Lexer) consumeHexDigit() bool {
+ c := l.r.Peek(0)
+ if (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F') {
+ l.r.Move(1)
+ return true
+ }
+ return false
+}
+
+func (l *Lexer) consumeEscape() bool {
+ if l.r.Peek(0) != '\\' {
+ return false
+ }
+ mark := l.r.Pos()
+ l.r.Move(1)
+ if l.consumeNewline() {
+ l.r.Rewind(mark)
+ return false
+ } else if l.consumeHexDigit() {
+ for k := 1; k < 6; k++ {
+ if !l.consumeHexDigit() {
+ break
+ }
+ }
+ l.consumeWhitespace()
+ return true
+ } else {
+ c := l.r.Peek(0)
+ if c >= 0xC0 {
+ _, n := l.r.PeekRune(0)
+ l.r.Move(n)
+ return true
+ } else if c == 0 && l.r.Err() != nil {
+ return true
+ }
+ }
+ l.r.Move(1)
+ return true
+}
+
+func (l *Lexer) consumeIdentToken() bool {
+ mark := l.r.Pos()
+ if l.r.Peek(0) == '-' {
+ l.r.Move(1)
+ }
+ c := l.r.Peek(0)
+ if !((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_' || c >= 0x80) {
+ if c != '\\' || !l.consumeEscape() {
+ l.r.Rewind(mark)
+ return false
+ }
+ } else {
+ l.r.Move(1)
+ }
+ for {
+ c := l.r.Peek(0)
+ if !((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_' || c == '-' || c >= 0x80) {
+ if c != '\\' || !l.consumeEscape() {
+ break
+ }
+ } else {
+ l.r.Move(1)
+ }
+ }
+ return true
+}
+
+// consumeCustomVariableToken consumes a custom property name such as '--var', see https://www.w3.org/TR/css-variables-1/
+func (l *Lexer) consumeCustomVariableToken() bool {
+ // expect to be on a '-'
+ l.r.Move(1)
+ if l.r.Peek(0) != '-' {
+ l.r.Move(-1)
+ return false
+ }
+ if !l.consumeIdentToken() {
+ l.r.Move(-1)
+ return false
+ }
+ return true
+}
+
+func (l *Lexer) consumeAtKeywordToken() bool {
+ // expect to be on an '@'
+ l.r.Move(1)
+ if !l.consumeIdentToken() {
+ l.r.Move(-1)
+ return false
+ }
+ return true
+}
+
+func (l *Lexer) consumeHashToken() bool {
+ // expect to be on a '#'
+ mark := l.r.Pos()
+ l.r.Move(1)
+ c := l.r.Peek(0)
+ if !((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_' || c == '-' || c >= 0x80) {
+ if c != '\\' || !l.consumeEscape() {
+ l.r.Rewind(mark)
+ return false
+ }
+ } else {
+ l.r.Move(1)
+ }
+ for {
+ c := l.r.Peek(0)
+ if !((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_' || c == '-' || c >= 0x80) {
+ if c != '\\' || !l.consumeEscape() {
+ break
+ }
+ } else {
+ l.r.Move(1)
+ }
+ }
+ return true
+}
+
+func (l *Lexer) consumeNumberToken() bool {
+ mark := l.r.Pos()
+ c := l.r.Peek(0)
+ if c == '+' || c == '-' {
+ l.r.Move(1)
+ }
+ firstDigit := l.consumeDigit()
+ if firstDigit {
+ for l.consumeDigit() {
+ }
+ }
+ if l.r.Peek(0) == '.' {
+ l.r.Move(1)
+ if l.consumeDigit() {
+ for l.consumeDigit() {
+ }
+ } else if firstDigit {
+ // . could belong to the next token
+ l.r.Move(-1)
+ return true
+ } else {
+ l.r.Rewind(mark)
+ return false
+ }
+ } else if !firstDigit {
+ l.r.Rewind(mark)
+ return false
+ }
+ mark = l.r.Pos()
+ c = l.r.Peek(0)
+ if c == 'e' || c == 'E' {
+ l.r.Move(1)
+ c = l.r.Peek(0)
+ if c == '+' || c == '-' {
+ l.r.Move(1)
+ }
+ if !l.consumeDigit() {
+ // e could belong to next token
+ l.r.Rewind(mark)
+ return true
+ }
+ for l.consumeDigit() {
+ }
+ }
+ return true
+}
+
+func (l *Lexer) consumeUnicodeRangeToken() bool {
+ c := l.r.Peek(0)
+ if (c != 'u' && c != 'U') || l.r.Peek(1) != '+' {
+ return false
+ }
+ mark := l.r.Pos()
+ l.r.Move(2)
+ if l.consumeHexDigit() {
+ // consume up to 6 hexDigits
+ k := 1
+ for ; k < 6; k++ {
+ if !l.consumeHexDigit() {
+ break
+ }
+ }
+
+ // either a minus or a question mark or the end is expected
+ if l.consumeByte('-') {
+ // consume another up to 6 hexDigits
+ if l.consumeHexDigit() {
+ for k := 1; k < 6; k++ {
+ if !l.consumeHexDigit() {
+ break
+ }
+ }
+ } else {
+ l.r.Rewind(mark)
+ return false
+ }
+ } else {
+ // could be filled up to 6 characters with question marks or else regular hexDigits
+ if l.consumeByte('?') {
+ k++
+ for ; k < 6; k++ {
+ if !l.consumeByte('?') {
+ l.r.Rewind(mark)
+ return false
+ }
+ }
+ }
+ }
+ } else {
+ // consume 6 question marks
+ for k := 0; k < 6; k++ {
+ if !l.consumeByte('?') {
+ l.r.Rewind(mark)
+ return false
+ }
+ }
+ }
+ return true
+}
+
+func (l *Lexer) consumeColumnToken() bool {
+ if l.r.Peek(0) == '|' && l.r.Peek(1) == '|' {
+ l.r.Move(2)
+ return true
+ }
+ return false
+}
+
+func (l *Lexer) consumeCDOToken() bool {
+ if l.r.Peek(0) == '<' && l.r.Peek(1) == '!' && l.r.Peek(2) == '-' && l.r.Peek(3) == '-' {
+ l.r.Move(4)
+ return true
+ }
+ return false
+}
+
+func (l *Lexer) consumeCDCToken() bool {
+ if l.r.Peek(0) == '-' && l.r.Peek(1) == '-' && l.r.Peek(2) == '>' {
+ l.r.Move(3)
+ return true
+ }
+ return false
+}
+
+////////////////////////////////////////////////////////////////
+
+// consumeMatch consumes any MatchToken.
+func (l *Lexer) consumeMatch() TokenType {
+ if l.r.Peek(1) == '=' {
+ switch l.r.Peek(0) {
+ case '~':
+ l.r.Move(2)
+ return IncludeMatchToken
+ case '|':
+ l.r.Move(2)
+ return DashMatchToken
+ case '^':
+ l.r.Move(2)
+ return PrefixMatchToken
+ case '$':
+ l.r.Move(2)
+ return SuffixMatchToken
+ case '*':
+ l.r.Move(2)
+ return SubstringMatchToken
+ }
+ }
+ return ErrorToken
+}
+
+// consumeBracket consumes any bracket token.
+func (l *Lexer) consumeBracket() TokenType {
+ switch l.r.Peek(0) {
+ case '(':
+ l.r.Move(1)
+ return LeftParenthesisToken
+ case ')':
+ l.r.Move(1)
+ return RightParenthesisToken
+ case '[':
+ l.r.Move(1)
+ return LeftBracketToken
+ case ']':
+ l.r.Move(1)
+ return RightBracketToken
+ case '{':
+ l.r.Move(1)
+ return LeftBraceToken
+ case '}':
+ l.r.Move(1)
+ return RightBraceToken
+ }
+ return ErrorToken
+}
+
+// consumeNumeric consumes NumberToken, PercentageToken or DimensionToken.
+func (l *Lexer) consumeNumeric() TokenType {
+ if l.consumeNumberToken() {
+ if l.consumeByte('%') {
+ return PercentageToken
+ } else if l.consumeIdentToken() {
+ return DimensionToken
+ }
+ return NumberToken
+ }
+ return ErrorToken
+}
+
+// consumeString consumes a string and may return BadStringToken when a newline is encountered.
+func (l *Lexer) consumeString() TokenType {
+ // assume to be on " or '
+ delim := l.r.Peek(0)
+ l.r.Move(1)
+ for {
+ c := l.r.Peek(0)
+ if c == 0 && l.Err() != nil {
+ break
+ } else if c == '\n' || c == '\r' || c == '\f' {
+ l.r.Move(1)
+ return BadStringToken
+ } else if c == delim {
+ l.r.Move(1)
+ break
+ } else if c == '\\' {
+ if !l.consumeEscape() {
+ l.r.Move(1)
+ l.consumeNewline()
+ }
+ } else {
+ l.r.Move(1)
+ }
+ }
+ return StringToken
+}
+
+func (l *Lexer) consumeUnquotedURL() bool {
+ for {
+ c := l.r.Peek(0)
+ if c == 0 && l.Err() != nil || c == ')' {
+ break
+ } else if c == '"' || c == '\'' || c == '(' || c == '\\' || c == ' ' || c <= 0x1F || c == 0x7F {
+ if c != '\\' || !l.consumeEscape() {
+ return false
+ }
+ } else {
+ l.r.Move(1)
+ }
+ }
+ return true
+}
+
+// consumeRemnantsBadURL consumes the remaining bytes of a BadURLToken so that normal tokenization may continue.
+func (l *Lexer) consumeRemnantsBadURL() {
+ for {
+ if l.consumeByte(')') || l.Err() != nil {
+ break
+ } else if !l.consumeEscape() {
+ l.r.Move(1)
+ }
+ }
+}
+
+// consumeIdentlike consumes an IdentToken, FunctionToken or URLToken.
+func (l *Lexer) consumeIdentlike() TokenType {
+ if l.consumeIdentToken() {
+ if l.r.Peek(0) != '(' {
+ return IdentToken
+ } else if !parse.EqualFold(bytes.Replace(l.r.Lexeme(), []byte{'\\'}, nil, -1), []byte{'u', 'r', 'l'}) {
+ l.r.Move(1)
+ return FunctionToken
+ }
+ l.r.Move(1)
+
+ // consume url
+ for l.consumeWhitespace() {
+ }
+ if c := l.r.Peek(0); c == '"' || c == '\'' {
+ if l.consumeString() == BadStringToken {
+ l.consumeRemnantsBadURL()
+ return BadURLToken
+ }
+ } else if !l.consumeUnquotedURL() && !l.consumeWhitespace() {
+ l.consumeRemnantsBadURL()
+ return BadURLToken
+ }
+ for l.consumeWhitespace() {
+ }
+ if !l.consumeByte(')') && l.Err() != io.EOF {
+ l.consumeRemnantsBadURL()
+ return BadURLToken
+ }
+ return URLToken
+ }
+ return ErrorToken
+}
diff --git a/vendor/github.com/tdewolff/parse/css/lex_test.go b/vendor/github.com/tdewolff/parse/css/lex_test.go
new file mode 100644
index 0000000..0bdc891
--- /dev/null
+++ b/vendor/github.com/tdewolff/parse/css/lex_test.go
@@ -0,0 +1,143 @@
+package css // import "github.com/tdewolff/parse/css"
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "testing"
+
+ "github.com/tdewolff/test"
+)
+
+type TTs []TokenType
+
+func TestTokens(t *testing.T) {
+ var tokenTests = []struct {
+ css string
+ expected []TokenType
+ }{
+ {" ", TTs{}},
+ {"5.2 .4", TTs{NumberToken, NumberToken}},
+ {"color: red;", TTs{IdentToken, ColonToken, IdentToken, SemicolonToken}},
+ {"background: url(\"http://x\");", TTs{IdentToken, ColonToken, URLToken, SemicolonToken}},
+ {"background: URL(x.png);", TTs{IdentToken, ColonToken, URLToken, SemicolonToken}},
+ {"color: rgb(4, 0%, 5em);", TTs{IdentToken, ColonToken, FunctionToken, NumberToken, CommaToken, PercentageToken, CommaToken, DimensionToken, RightParenthesisToken, SemicolonToken}},
+ {"body { \"string\" }", TTs{IdentToken, LeftBraceToken, StringToken, RightBraceToken}},
+ {"body { \"str\\\"ing\" }", TTs{IdentToken, LeftBraceToken, StringToken, RightBraceToken}},
+ {".class { }", TTs{DelimToken, IdentToken, LeftBraceToken, RightBraceToken}},
+ {"#class { }", TTs{HashToken, LeftBraceToken, RightBraceToken}},
+ {"#class\\#withhash { }", TTs{HashToken, LeftBraceToken, RightBraceToken}},
+ {"@media print { }", TTs{AtKeywordToken, IdentToken, LeftBraceToken, RightBraceToken}},
+ {"/*comment*/", TTs{CommentToken}},
+ {"/*com* /ment*/", TTs{CommentToken}},
+ {"~= |= ^= $= *=", TTs{IncludeMatchToken, DashMatchToken, PrefixMatchToken, SuffixMatchToken, SubstringMatchToken}},
+ {"||", TTs{ColumnToken}},
+ {"<!-- -->", TTs{CDOToken, CDCToken}},
+ {"U+1234", TTs{UnicodeRangeToken}},
+ {"5.2 .4 4e-22", TTs{NumberToken, NumberToken, NumberToken}},
+ {"--custom-variable", TTs{CustomPropertyNameToken}},
+
+ // unexpected ending
+ {"ident", TTs{IdentToken}},
+ {"123.", TTs{NumberToken, DelimToken}},
+ {"\"string", TTs{StringToken}},
+ {"123/*comment", TTs{NumberToken, CommentToken}},
+ {"U+1-", TTs{IdentToken, NumberToken, DelimToken}},
+
+ // unicode
+ {"fooδbar􀀀", TTs{IdentToken}},
+ {"foo\\æ\\†", TTs{IdentToken}},
+ // {"foo\x00bar", TTs{IdentToken}},
+ {"'foo\u554abar'", TTs{StringToken}},
+ {"\\000026B", TTs{IdentToken}},
+ {"\\26 B", TTs{IdentToken}},
+
+ // hacks
+ {`\-\mo\z\-b\i\nd\in\g:\url(//business\i\nfo.co.uk\/labs\/xbl\/xbl\.xml\#xss);`, TTs{IdentToken, ColonToken, URLToken, SemicolonToken}},
+ {"width/**/:/**/ 40em;", TTs{IdentToken, CommentToken, ColonToken, CommentToken, DimensionToken, SemicolonToken}},
+ {":root *> #quince", TTs{ColonToken, IdentToken, DelimToken, DelimToken, HashToken}},
+ {"html[xmlns*=\"\"]:root", TTs{IdentToken, LeftBracketToken, IdentToken, SubstringMatchToken, StringToken, RightBracketToken, ColonToken, IdentToken}},
+ {"body:nth-of-type(1)", TTs{IdentToken, ColonToken, FunctionToken, NumberToken, RightParenthesisToken}},
+ {"color/*\\**/: blue\\9;", TTs{IdentToken, CommentToken, ColonToken, IdentToken, SemicolonToken}},
+ {"color: blue !ie;", TTs{IdentToken, ColonToken, IdentToken, DelimToken, IdentToken, SemicolonToken}},
+
+ // escapes, null and replacement character
+ {"c\\\x00olor: white;", TTs{IdentToken, ColonToken, IdentToken, SemicolonToken}},
+ {"null\\0", TTs{IdentToken}},
+ {"eof\\", TTs{IdentToken}},
+ {"\"a\x00b\"", TTs{StringToken}},
+ {"a\\\x00b", TTs{IdentToken}},
+ {"url(a\x00b)", TTs{BadURLToken}}, // null character cannot be unquoted
+ {"/*a\x00b*/", TTs{CommentToken}},
+
+ // coverage
+ {" \n\r\n\r\"\\\r\n\\\r\"", TTs{StringToken}},
+ {"U+?????? U+ABCD?? U+ABC-DEF", TTs{UnicodeRangeToken, UnicodeRangeToken, UnicodeRangeToken}},
+ {"U+? U+A?", TTs{IdentToken, DelimToken, DelimToken, IdentToken, DelimToken, IdentToken, DelimToken}},
+ {"-5.23 -moz", TTs{NumberToken, IdentToken}},
+ {"()", TTs{LeftParenthesisToken, RightParenthesisToken}},
+ {"url( //url )", TTs{URLToken}},
+ {"url( ", TTs{URLToken}},
+ {"url( //url", TTs{URLToken}},
+ {"url(\")a", TTs{URLToken}},
+ {"url(a'\\\n)a", TTs{BadURLToken, IdentToken}},
+ {"url(\"\n)a", TTs{BadURLToken, IdentToken}},
+ {"url(a h)a", TTs{BadURLToken, IdentToken}},
+ {"<!- | @4 ## /2", TTs{DelimToken, DelimToken, DelimToken, DelimToken, DelimToken, NumberToken, DelimToken, DelimToken, DelimToken, NumberToken}},
+ {"\"s\\\n\"", TTs{StringToken}},
+ {"\"a\\\"b\"", TTs{StringToken}},
+ {"\"s\n", TTs{BadStringToken}},
+
+ // small
+ {"\"abcd", TTs{StringToken}},
+ {"/*comment", TTs{CommentToken}},
+ {"U+A-B", TTs{UnicodeRangeToken}},
+ {"url((", TTs{BadURLToken}},
+ {"id\u554a", TTs{IdentToken}},
+ }
+ for _, tt := range tokenTests {
+ t.Run(tt.css, func(t *testing.T) {
+ l := NewLexer(bytes.NewBufferString(tt.css))
+ i := 0
+ for {
+ token, _ := l.Next()
+ if token == ErrorToken {
+ test.T(t, l.Err(), io.EOF)
+ test.T(t, i, len(tt.expected), "when error occurred we must be at the end")
+ break
+ } else if token == WhitespaceToken {
+ continue
+ }
+ test.That(t, i < len(tt.expected), "index", i, "must not exceed expected token types size", len(tt.expected))
+ if i < len(tt.expected) {
+ test.T(t, token, tt.expected[i], "token types must match")
+ }
+ i++
+ }
+ })
+ }
+
+ test.T(t, WhitespaceToken.String(), "Whitespace")
+ test.T(t, EmptyToken.String(), "Empty")
+ test.T(t, CustomPropertyValueToken.String(), "CustomPropertyValue")
+ test.T(t, TokenType(100).String(), "Invalid(100)")
+ test.T(t, NewLexer(bytes.NewBufferString("x")).consumeBracket(), ErrorToken, "consumeBracket on 'x' must return error")
+}
+
+////////////////////////////////////////////////////////////////
+
+func ExampleNewLexer() {
+ l := NewLexer(bytes.NewBufferString("color: red;"))
+ out := ""
+ for {
+ tt, data := l.Next()
+ if tt == ErrorToken {
+ break
+ } else if tt == WhitespaceToken || tt == CommentToken {
+ continue
+ }
+ out += string(data)
+ }
+ fmt.Println(out)
+ // Output: color:red;
+}
diff --git a/vendor/github.com/tdewolff/parse/css/parse.go b/vendor/github.com/tdewolff/parse/css/parse.go
new file mode 100644
index 0000000..e48cd66
--- /dev/null
+++ b/vendor/github.com/tdewolff/parse/css/parse.go
@@ -0,0 +1,398 @@
+package css // import "github.com/tdewolff/parse/css"
+
+import (
+ "bytes"
+ "io"
+ "strconv"
+
+ "github.com/tdewolff/parse"
+)
+
+var wsBytes = []byte(" ")
+var endBytes = []byte("}")
+var emptyBytes = []byte("")
+
+// GrammarType determines the type of grammar.
+type GrammarType uint32
+
+// GrammarType values.
+const (
+ ErrorGrammar GrammarType = iota // extra token when errors occur
+ CommentGrammar
+ AtRuleGrammar
+ BeginAtRuleGrammar
+ EndAtRuleGrammar
+ QualifiedRuleGrammar
+ BeginRulesetGrammar
+ EndRulesetGrammar
+ DeclarationGrammar
+ TokenGrammar
+ CustomPropertyGrammar
+)
+
+// String returns the string representation of a GrammarType.
+func (tt GrammarType) String() string {
+ switch tt {
+ case ErrorGrammar:
+ return "Error"
+ case CommentGrammar:
+ return "Comment"
+ case AtRuleGrammar:
+ return "AtRule"
+ case BeginAtRuleGrammar:
+ return "BeginAtRule"
+ case EndAtRuleGrammar:
+ return "EndAtRule"
+ case QualifiedRuleGrammar:
+ return "QualifiedRule"
+ case BeginRulesetGrammar:
+ return "BeginRuleset"
+ case EndRulesetGrammar:
+ return "EndRuleset"
+ case DeclarationGrammar:
+ return "Declaration"
+ case TokenGrammar:
+ return "Token"
+ case CustomPropertyGrammar:
+ return "CustomProperty"
+ }
+ return "Invalid(" + strconv.Itoa(int(tt)) + ")"
+}
+
+////////////////////////////////////////////////////////////////
+
+// State is the state function the parser currently is in.
+type State func(*Parser) GrammarType
+
+// Token is a single TokenType and its associated data.
+type Token struct {
+ TokenType
+ Data []byte
+}
+
+// Parser is the state for the parser.
+type Parser struct {
+ l *Lexer
+ state []State
+ err error
+
+ buf []Token
+ level int
+
+ tt TokenType
+ data []byte
+ prevWS bool
+ prevEnd bool
+}
+
+// NewParser returns a new CSS parser from an io.Reader. isInline specifies whether this is an inline style attribute.
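+//
+// For example, NewParser(bytes.NewBufferString("color: red;"), true) parses
+// the contents of an inline style attribute.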
+func NewParser(r io.Reader, isInline bool) *Parser {
+ l := NewLexer(r)
+ p := &Parser{
+ l: l,
+ state: make([]State, 0, 4),
+ }
+
+ if isInline {
+ p.state = append(p.state, (*Parser).parseDeclarationList)
+ } else {
+ p.state = append(p.state, (*Parser).parseStylesheet)
+ }
+ return p
+}
+
+// Err returns the error encountered during parsing; this is often io.EOF, but other errors can be returned as well.
+func (p *Parser) Err() error {
+ if p.err != nil {
+ return p.err
+ }
+ return p.l.Err()
+}
+
+// Restore restores the NULL byte at the end of the buffer.
+func (p *Parser) Restore() {
+ p.l.Restore()
+}
+
+// Next returns the next Grammar. It returns ErrorGrammar when an error was encountered. Using Err() one can retrieve the error message.
+func (p *Parser) Next() (GrammarType, TokenType, []byte) {
+ p.err = nil
+
+ if p.prevEnd {
+ p.tt, p.data = RightBraceToken, endBytes
+ p.prevEnd = false
+ } else {
+ p.tt, p.data = p.popToken(true)
+ }
+ gt := p.state[len(p.state)-1](p)
+ return gt, p.tt, p.data
+}
+
+// Values returns a slice of Tokens for the last Grammar. Only AtRuleGrammar, BeginAtRuleGrammar, BeginRulesetGrammar and DeclarationGrammar have values: the at-rule components, the ruleset selector and the declaration values respectively.
+func (p *Parser) Values() []Token {
+ return p.buf
+}
+
+func (p *Parser) popToken(allowComment bool) (TokenType, []byte) {
+ p.prevWS = false
+ tt, data := p.l.Next()
+ for tt == WhitespaceToken || tt == CommentToken {
+ if tt == WhitespaceToken {
+ p.prevWS = true
+ } else if allowComment && len(p.state) == 1 {
+ break
+ }
+ tt, data = p.l.Next()
+ }
+ return tt, data
+}
+
+func (p *Parser) initBuf() {
+ p.buf = p.buf[:0]
+}
+
+func (p *Parser) pushBuf(tt TokenType, data []byte) {
+ p.buf = append(p.buf, Token{tt, data})
+}
+
+////////////////////////////////////////////////////////////////
+
+func (p *Parser) parseStylesheet() GrammarType {
+ if p.tt == CDOToken || p.tt == CDCToken {
+ return TokenGrammar
+ } else if p.tt == AtKeywordToken {
+ return p.parseAtRule()
+ } else if p.tt == CommentToken {
+ return CommentGrammar
+ } else if p.tt == ErrorToken {
+ return ErrorGrammar
+ }
+ return p.parseQualifiedRule()
+}
+
+func (p *Parser) parseDeclarationList() GrammarType {
+ if p.tt == CommentToken {
+ p.tt, p.data = p.popToken(false)
+ }
+ for p.tt == SemicolonToken {
+ p.tt, p.data = p.popToken(false)
+ }
+ if p.tt == ErrorToken {
+ return ErrorGrammar
+ } else if p.tt == AtKeywordToken {
+ return p.parseAtRule()
+ } else if p.tt == IdentToken {
+ return p.parseDeclaration()
+ } else if p.tt == CustomPropertyNameToken {
+ return p.parseCustomProperty()
+ }
+
+ // parse error
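+	// Error recovery: consume tokens until the declaration is terminated by ';' or
+	// '}' at the top nesting level (or the input ends); the skipped tokens remain
+	// available through Values().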
+ p.initBuf()
+ p.err = parse.NewErrorLexer("unexpected token in declaration", p.l.r)
+ for {
+ tt, data := p.popToken(false)
+ if (tt == SemicolonToken || tt == RightBraceToken) && p.level == 0 || tt == ErrorToken {
+ p.prevEnd = (tt == RightBraceToken)
+ return ErrorGrammar
+ }
+ p.pushBuf(tt, data)
+ }
+}
+
+////////////////////////////////////////////////////////////////
+
+func (p *Parser) parseAtRule() GrammarType {
+ p.initBuf()
+ parse.ToLower(p.data)
+ atRuleName := p.data
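+	// A vendor prefix is stripped first, so that e.g. @-moz-keyframes hashes to Keyframes.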
+	if len(atRuleName) > 1 && atRuleName[1] == '-' {
+ if i := bytes.IndexByte(atRuleName[2:], '-'); i != -1 {
+ atRuleName = atRuleName[i+2:] // skip vendor specific prefix
+ }
+ }
+ atRule := ToHash(atRuleName[1:])
+
+ first := true
+ skipWS := false
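+	// While collecting the prelude, runs of whitespace are collapsed to a single
+	// space and dropped around ',' and ':', after '(' and before ')'.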
+ for {
+ tt, data := p.popToken(false)
+ if tt == LeftBraceToken && p.level == 0 {
+ if atRule == Font_Face || atRule == Page {
+ p.state = append(p.state, (*Parser).parseAtRuleDeclarationList)
+ } else if atRule == Document || atRule == Keyframes || atRule == Media || atRule == Supports {
+ p.state = append(p.state, (*Parser).parseAtRuleRuleList)
+ } else {
+ p.state = append(p.state, (*Parser).parseAtRuleUnknown)
+ }
+ return BeginAtRuleGrammar
+ } else if (tt == SemicolonToken || tt == RightBraceToken) && p.level == 0 || tt == ErrorToken {
+ p.prevEnd = (tt == RightBraceToken)
+ return AtRuleGrammar
+ } else if tt == LeftParenthesisToken || tt == LeftBraceToken || tt == LeftBracketToken || tt == FunctionToken {
+ p.level++
+ } else if tt == RightParenthesisToken || tt == RightBraceToken || tt == RightBracketToken {
+ p.level--
+ }
+ if first {
+ if tt == LeftParenthesisToken || tt == LeftBracketToken {
+ p.prevWS = false
+ }
+ first = false
+ }
+ if len(data) == 1 && (data[0] == ',' || data[0] == ':') {
+ skipWS = true
+ } else if p.prevWS && !skipWS && tt != RightParenthesisToken {
+ p.pushBuf(WhitespaceToken, wsBytes)
+ } else {
+ skipWS = false
+ }
+ if tt == LeftParenthesisToken {
+ skipWS = true
+ }
+ p.pushBuf(tt, data)
+ }
+}
+
+func (p *Parser) parseAtRuleRuleList() GrammarType {
+ if p.tt == RightBraceToken || p.tt == ErrorToken {
+ p.state = p.state[:len(p.state)-1]
+ return EndAtRuleGrammar
+ } else if p.tt == AtKeywordToken {
+ return p.parseAtRule()
+ } else {
+ return p.parseQualifiedRule()
+ }
+}
+
+func (p *Parser) parseAtRuleDeclarationList() GrammarType {
+ for p.tt == SemicolonToken {
+ p.tt, p.data = p.popToken(false)
+ }
+ if p.tt == RightBraceToken || p.tt == ErrorToken {
+ p.state = p.state[:len(p.state)-1]
+ return EndAtRuleGrammar
+ }
+ return p.parseDeclarationList()
+}
+
+func (p *Parser) parseAtRuleUnknown() GrammarType {
+ if p.tt == RightBraceToken && p.level == 0 || p.tt == ErrorToken {
+ p.state = p.state[:len(p.state)-1]
+ return EndAtRuleGrammar
+ }
+ if p.tt == LeftParenthesisToken || p.tt == LeftBraceToken || p.tt == LeftBracketToken || p.tt == FunctionToken {
+ p.level++
+ } else if p.tt == RightParenthesisToken || p.tt == RightBraceToken || p.tt == RightBracketToken {
+ p.level--
+ }
+ return TokenGrammar
+}
+
+func (p *Parser) parseQualifiedRule() GrammarType {
+ p.initBuf()
+ first := true
+ inAttrSel := false
+ skipWS := true
+ var tt TokenType
+ var data []byte
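+	// The first selector token was already read by Next; feed it to the loop and
+	// clear p.tt and p.data so the returned grammar itself carries no data, the
+	// selector tokens being exposed through Values().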
+ for {
+ if first {
+ tt, data = p.tt, p.data
+ p.tt = WhitespaceToken
+ p.data = emptyBytes
+ first = false
+ } else {
+ tt, data = p.popToken(false)
+ }
+ if tt == LeftBraceToken && p.level == 0 {
+ p.state = append(p.state, (*Parser).parseQualifiedRuleDeclarationList)
+ return BeginRulesetGrammar
+ } else if tt == ErrorToken {
+ p.err = parse.NewErrorLexer("unexpected ending in qualified rule, expected left brace token", p.l.r)
+ return ErrorGrammar
+ } else if tt == LeftParenthesisToken || tt == LeftBraceToken || tt == LeftBracketToken || tt == FunctionToken {
+ p.level++
+ } else if tt == RightParenthesisToken || tt == RightBraceToken || tt == RightBracketToken {
+ p.level--
+ }
+ if len(data) == 1 && (data[0] == ',' || data[0] == '>' || data[0] == '+' || data[0] == '~') {
+ if data[0] == ',' {
+ return QualifiedRuleGrammar
+ }
+ skipWS = true
+ } else if p.prevWS && !skipWS && !inAttrSel {
+ p.pushBuf(WhitespaceToken, wsBytes)
+ } else {
+ skipWS = false
+ }
+ if tt == LeftBracketToken {
+ inAttrSel = true
+ } else if tt == RightBracketToken {
+ inAttrSel = false
+ }
+ p.pushBuf(tt, data)
+ }
+}
+
+func (p *Parser) parseQualifiedRuleDeclarationList() GrammarType {
+ for p.tt == SemicolonToken {
+ p.tt, p.data = p.popToken(false)
+ }
+ if p.tt == RightBraceToken || p.tt == ErrorToken {
+ p.state = p.state[:len(p.state)-1]
+ return EndRulesetGrammar
+ }
+ return p.parseDeclarationList()
+}
+
+func (p *Parser) parseDeclaration() GrammarType {
+ p.initBuf()
+ parse.ToLower(p.data)
+ if tt, _ := p.popToken(false); tt != ColonToken {
+ p.err = parse.NewErrorLexer("unexpected token in declaration", p.l.r)
+ return ErrorGrammar
+ }
+ skipWS := true
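+	// Whitespace in the value is collapsed to single spaces and dropped after
+	// ',', '/', ':', '!' and '='.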
+ for {
+ tt, data := p.popToken(false)
+ if (tt == SemicolonToken || tt == RightBraceToken) && p.level == 0 || tt == ErrorToken {
+ p.prevEnd = (tt == RightBraceToken)
+ return DeclarationGrammar
+ } else if tt == LeftParenthesisToken || tt == LeftBraceToken || tt == LeftBracketToken || tt == FunctionToken {
+ p.level++
+ } else if tt == RightParenthesisToken || tt == RightBraceToken || tt == RightBracketToken {
+ p.level--
+ }
+ if len(data) == 1 && (data[0] == ',' || data[0] == '/' || data[0] == ':' || data[0] == '!' || data[0] == '=') {
+ skipWS = true
+ } else if p.prevWS && !skipWS {
+ p.pushBuf(WhitespaceToken, wsBytes)
+ } else {
+ skipWS = false
+ }
+ p.pushBuf(tt, data)
+ }
+}
+
+func (p *Parser) parseCustomProperty() GrammarType {
+ p.initBuf()
+ if tt, _ := p.popToken(false); tt != ColonToken {
+ p.err = parse.NewErrorLexer("unexpected token in declaration", p.l.r)
+ return ErrorGrammar
+ }
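+	// The value is collected verbatim from the lexer, whitespace and comments
+	// included, and returned as a single CustomPropertyValueToken.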
+ val := []byte{}
+ for {
+ tt, data := p.l.Next()
+ if (tt == SemicolonToken || tt == RightBraceToken) && p.level == 0 || tt == ErrorToken {
+ p.prevEnd = (tt == RightBraceToken)
+ p.pushBuf(CustomPropertyValueToken, val)
+ return CustomPropertyGrammar
+ } else if tt == LeftParenthesisToken || tt == LeftBraceToken || tt == LeftBracketToken || tt == FunctionToken {
+ p.level++
+ } else if tt == RightParenthesisToken || tt == RightBraceToken || tt == RightBracketToken {
+ p.level--
+ }
+ val = append(val, data...)
+ }
+}
diff --git a/vendor/github.com/tdewolff/parse/css/parse_test.go b/vendor/github.com/tdewolff/parse/css/parse_test.go
new file mode 100644
index 0000000..33f6f5f
--- /dev/null
+++ b/vendor/github.com/tdewolff/parse/css/parse_test.go
@@ -0,0 +1,248 @@
+package css // import "github.com/tdewolff/parse/css"
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "testing"
+
+ "github.com/tdewolff/parse"
+ "github.com/tdewolff/test"
+)
+
+////////////////////////////////////////////////////////////////
+
+func TestParse(t *testing.T) {
+ var parseTests = []struct {
+ inline bool
+ css string
+ expected string
+ }{
+ {true, " x : y ; ", "x:y;"},
+ {true, "color: red;", "color:red;"},
+ {true, "color : red;", "color:red;"},
+ {true, "color: red; border: 0;", "color:red;border:0;"},
+ {true, "color: red !important;", "color:red!important;"},
+ {true, "color: red ! important;", "color:red!important;"},
+ {true, "white-space: -moz-pre-wrap;", "white-space:-moz-pre-wrap;"},
+ {true, "display: -moz-inline-stack;", "display:-moz-inline-stack;"},
+ {true, "x: 10px / 1em;", "x:10px/1em;"},
+ {true, "x: 1em/1.5em \"Times New Roman\", Times, serif;", "x:1em/1.5em \"Times New Roman\",Times,serif;"},
+ {true, "x: hsla(100,50%, 75%, 0.5);", "x:hsla(100,50%,75%,0.5);"},
+ {true, "x: hsl(100,50%, 75%);", "x:hsl(100,50%,75%);"},
+ {true, "x: rgba(255, 238 , 221, 0.3);", "x:rgba(255,238,221,0.3);"},
+ {true, "x: 50vmax;", "x:50vmax;"},
+ {true, "color: linear-gradient(to right, black, white);", "color:linear-gradient(to right,black,white);"},
+ {true, "color: calc(100%/2 - 1em);", "color:calc(100%/2 - 1em);"},
+ {true, "color: calc(100%/2--1em);", "color:calc(100%/2--1em);"},
+ {false, "<!-- @charset; -->", "<!--@charset;-->"},
+ {false, "@media print, screen { }", "@media print,screen{}"},
+ {false, "@media { @viewport ; }", "@media{@viewport;}"},
+ {false, "@keyframes 'diagonal-slide' { from { left: 0; top: 0; } to { left: 100px; top: 100px; } }", "@keyframes 'diagonal-slide'{from{left:0;top:0;}to{left:100px;top:100px;}}"},
+ {false, "@keyframes movingbox{0%{left:90%;}50%{left:10%;}100%{left:90%;}}", "@keyframes movingbox{0%{left:90%;}50%{left:10%;}100%{left:90%;}}"},
+ {false, ".foo { color: #fff;}", ".foo{color:#fff;}"},
+ {false, ".foo { ; _color: #fff;}", ".foo{_color:#fff;}"},
+ {false, "a { color: red; border: 0; }", "a{color:red;border:0;}"},
+ {false, "a { color: red; border: 0; } b { padding: 0; }", "a{color:red;border:0;}b{padding:0;}"},
+ {false, "/* comment */", "/* comment */"},
+
+ // extraordinary
+ {true, "color: red;;", "color:red;"},
+ {true, "color:#c0c0c0", "color:#c0c0c0;"},
+ {true, "background:URL(x.png);", "background:URL(x.png);"},
+ {true, "filter: progid : DXImageTransform.Microsoft.BasicImage(rotation=1);", "filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=1);"},
+ {true, "/*a*/\n/*c*/\nkey: value;", "key:value;"},
+ {true, "@-moz-charset;", "@-moz-charset;"},
+ {true, "--custom-variable: (0;) ;", "--custom-variable: (0;) ;"},
+ {false, "@import;@import;", "@import;@import;"},
+ {false, ".a .b#c, .d<.e { x:y; }", ".a .b#c,.d<.e{x:y;}"},
+ {false, ".a[b~=c]d { x:y; }", ".a[b~=c]d{x:y;}"},
+ // {false, "{x:y;}", "{x:y;}"},
+ {false, "a{}", "a{}"},
+ {false, "a,.b/*comment*/ {x:y;}", "a,.b{x:y;}"},
+ {false, "a,.b/*comment*/.c {x:y;}", "a,.b.c{x:y;}"},
+ {false, "a{x:; z:q;}", "a{x:;z:q;}"},
+ {false, "@font-face { x:y; }", "@font-face{x:y;}"},
+ {false, "a:not([controls]){x:y;}", "a:not([controls]){x:y;}"},
+ {false, "@document regexp('https:.*') { p { color: red; } }", "@document regexp('https:.*'){p{color:red;}}"},
+ {false, "@media all and ( max-width:400px ) { }", "@media all and (max-width:400px){}"},
+ {false, "@media (max-width:400px) { }", "@media(max-width:400px){}"},
+ {false, "@media (max-width:400px)", "@media(max-width:400px);"},
+ {false, "@font-face { ; font:x; }", "@font-face{font:x;}"},
+ {false, "@-moz-font-face { ; font:x; }", "@-moz-font-face{font:x;}"},
+ {false, "@unknown abc { {} lala }", "@unknown abc{{}lala}"},
+ {false, "a[x={}]{x:y;}", "a[x={}]{x:y;}"},
+ {false, "a[x=,]{x:y;}", "a[x=,]{x:y;}"},
+ {false, "a[x=+]{x:y;}", "a[x=+]{x:y;}"},
+ {false, ".cla .ss > #id { x:y; }", ".cla .ss>#id{x:y;}"},
+ {false, ".cla /*a*/ /*b*/ .ss{}", ".cla .ss{}"},
+ {false, "a{x:f(a(),b);}", "a{x:f(a(),b);}"},
+ {false, "a{x:y!z;}", "a{x:y!z;}"},
+ {false, "[class*=\"column\"]+[class*=\"column\"]:last-child{a:b;}", "[class*=\"column\"]+[class*=\"column\"]:last-child{a:b;}"},
+ {false, "@media { @viewport }", "@media{@viewport;}"},
+ {false, "table { @unknown }", "table{@unknown;}"},
+
+ // early endings
+ {false, "selector{", "selector{"},
+ {false, "@media{selector{", "@media{selector{"},
+
+ // bad grammar
+ {true, "~color:red", "~color:red;"},
+ {false, ".foo { *color: #fff;}", ".foo{*color:#fff;}"},
+ {true, "*color: red; font-size: 12pt;", "*color:red;font-size:12pt;"},
+ {true, "_color: red; font-size: 12pt;", "_color:red;font-size:12pt;"},
+
+ // issues
+ {false, "@media print {.class{width:5px;}}", "@media print{.class{width:5px;}}"}, // #6
+ {false, ".class{width:calc((50% + 2em)/2 + 14px);}", ".class{width:calc((50% + 2em)/2 + 14px);}"}, // #7
+ {false, ".class [c=y]{}", ".class [c=y]{}"}, // tdewolff/minify#16
+ {false, "table{font-family:Verdana}", "table{font-family:Verdana;}"}, // tdewolff/minify#22
+
+ // go-fuzz
+ {false, "@-webkit-", "@-webkit-;"},
+ }
+ for _, tt := range parseTests {
+ t.Run(tt.css, func(t *testing.T) {
+ output := ""
+ p := NewParser(bytes.NewBufferString(tt.css), tt.inline)
+ for {
+ grammar, _, data := p.Next()
+ data = parse.Copy(data)
+ if grammar == ErrorGrammar {
+ if err := p.Err(); err != io.EOF {
+ for _, val := range p.Values() {
+ data = append(data, val.Data...)
+ }
+ if perr, ok := err.(*parse.Error); ok && perr.Message == "unexpected token in declaration" {
+ data = append(data, ";"...)
+ }
+ } else {
+ test.T(t, err, io.EOF)
+ break
+ }
+ } else if grammar == AtRuleGrammar || grammar == BeginAtRuleGrammar || grammar == QualifiedRuleGrammar || grammar == BeginRulesetGrammar || grammar == DeclarationGrammar || grammar == CustomPropertyGrammar {
+ if grammar == DeclarationGrammar || grammar == CustomPropertyGrammar {
+ data = append(data, ":"...)
+ }
+ for _, val := range p.Values() {
+ data = append(data, val.Data...)
+ }
+ if grammar == BeginAtRuleGrammar || grammar == BeginRulesetGrammar {
+ data = append(data, "{"...)
+ } else if grammar == AtRuleGrammar || grammar == DeclarationGrammar || grammar == CustomPropertyGrammar {
+ data = append(data, ";"...)
+ } else if grammar == QualifiedRuleGrammar {
+ data = append(data, ","...)
+ }
+ }
+ output += string(data)
+ }
+ test.String(t, output, tt.expected)
+ })
+ }
+
+ test.T(t, ErrorGrammar.String(), "Error")
+ test.T(t, AtRuleGrammar.String(), "AtRule")
+ test.T(t, BeginAtRuleGrammar.String(), "BeginAtRule")
+ test.T(t, EndAtRuleGrammar.String(), "EndAtRule")
+ test.T(t, BeginRulesetGrammar.String(), "BeginRuleset")
+ test.T(t, EndRulesetGrammar.String(), "EndRuleset")
+ test.T(t, DeclarationGrammar.String(), "Declaration")
+ test.T(t, TokenGrammar.String(), "Token")
+ test.T(t, CommentGrammar.String(), "Comment")
+ test.T(t, CustomPropertyGrammar.String(), "CustomProperty")
+ test.T(t, GrammarType(100).String(), "Invalid(100)")
+}
+
+func TestParseError(t *testing.T) {
+ var parseErrorTests = []struct {
+ inline bool
+ css string
+ col int
+ }{
+ {false, "selector", 9},
+ {true, "color 0", 8},
+ {true, "--color 0", 10},
+ {true, "--custom-variable:0", 0},
+ }
+ for _, tt := range parseErrorTests {
+ t.Run(tt.css, func(t *testing.T) {
+ p := NewParser(bytes.NewBufferString(tt.css), tt.inline)
+ for {
+ grammar, _, _ := p.Next()
+ if grammar == ErrorGrammar {
+ if tt.col == 0 {
+ test.T(t, p.Err(), io.EOF)
+ } else if perr, ok := p.Err().(*parse.Error); ok {
+ test.T(t, perr.Col, tt.col)
+ } else {
+ test.Fail(t, "bad error:", p.Err())
+ }
+ break
+ }
+ }
+ })
+ }
+}
+
+func TestReader(t *testing.T) {
+ input := "x:a;"
+ p := NewParser(test.NewPlainReader(bytes.NewBufferString(input)), true)
+ for {
+ grammar, _, _ := p.Next()
+ if grammar == ErrorGrammar {
+ break
+ }
+ }
+}
+
+////////////////////////////////////////////////////////////////
+
+type Obj struct{}
+
+func (*Obj) F() {}
+
+var f1 func(*Obj)
+
+func BenchmarkFuncPtr(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ f1 = (*Obj).F
+ }
+}
+
+var f2 func()
+
+func BenchmarkMemFuncPtr(b *testing.B) {
+ obj := &Obj{}
+ for i := 0; i < b.N; i++ {
+ f2 = obj.F
+ }
+}
+
+func ExampleNewParser() {
+	p := NewParser(bytes.NewBufferString("color: red;"), true) // true because this is the content of an inline style attribute
+ out := ""
+ for {
+ gt, _, data := p.Next()
+ if gt == ErrorGrammar {
+ break
+ } else if gt == AtRuleGrammar || gt == BeginAtRuleGrammar || gt == BeginRulesetGrammar || gt == DeclarationGrammar {
+ out += string(data)
+ if gt == DeclarationGrammar {
+ out += ":"
+ }
+ for _, val := range p.Values() {
+ out += string(val.Data)
+ }
+ if gt == BeginAtRuleGrammar || gt == BeginRulesetGrammar {
+ out += "{"
+ } else if gt == AtRuleGrammar || gt == DeclarationGrammar {
+ out += ";"
+ }
+ } else {
+ out += string(data)
+ }
+ }
+ fmt.Println(out)
+ // Output: color:red;
+}
diff --git a/vendor/github.com/tdewolff/parse/css/util.go b/vendor/github.com/tdewolff/parse/css/util.go
new file mode 100644
index 0000000..676dee8
--- /dev/null
+++ b/vendor/github.com/tdewolff/parse/css/util.go
@@ -0,0 +1,47 @@
+package css // import "github.com/tdewolff/parse/css"
+
+import "github.com/tdewolff/parse/buffer"
+
+// IsIdent returns true if the bytes are a valid identifier.
+func IsIdent(b []byte) bool {
+ l := NewLexer(buffer.NewReader(b))
+ l.consumeIdentToken()
+ l.r.Restore()
+ return l.r.Pos() == len(b)
+}
+
+// IsURLUnquoted returns true if the bytes are a valid unquoted URL.
+func IsURLUnquoted(b []byte) bool {
+ l := NewLexer(buffer.NewReader(b))
+ l.consumeUnquotedURL()
+ l.r.Restore()
+ return l.r.Pos() == len(b)
+}
+
+// HSL2RGB converts HSL to RGB, with all values in the range [0,1],
+// following http://www.w3.org/TR/css3-color/#hsl-color.
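+// For example, HSL2RGB(0.0, 1.0, 0.5) yields pure red (1, 0, 0).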
+func HSL2RGB(h, s, l float64) (float64, float64, float64) {
+ m2 := l * (s + 1)
+ if l > 0.5 {
+ m2 = l + s - l*s
+ }
+ m1 := l*2 - m2
+ return hue2rgb(m1, m2, h+1.0/3.0), hue2rgb(m1, m2, h), hue2rgb(m1, m2, h-1.0/3.0)
+}
+
+func hue2rgb(m1, m2, h float64) float64 {
+ if h < 0.0 {
+ h += 1.0
+ }
+ if h > 1.0 {
+ h -= 1.0
+ }
+ if h*6.0 < 1.0 {
+ return m1 + (m2-m1)*h*6.0
+ } else if h*2.0 < 1.0 {
+ return m2
+ } else if h*3.0 < 2.0 {
+ return m1 + (m2-m1)*(2.0/3.0-h)*6.0
+ }
+ return m1
+}
diff --git a/vendor/github.com/tdewolff/parse/css/util_test.go b/vendor/github.com/tdewolff/parse/css/util_test.go
new file mode 100644
index 0000000..9eb5aa9
--- /dev/null
+++ b/vendor/github.com/tdewolff/parse/css/util_test.go
@@ -0,0 +1,34 @@
+package css // import "github.com/tdewolff/parse/css"
+
+import (
+ "testing"
+
+ "github.com/tdewolff/test"
+)
+
+func TestIsIdent(t *testing.T) {
+ test.That(t, IsIdent([]byte("color")))
+ test.That(t, !IsIdent([]byte("4.5")))
+}
+
+func TestIsURLUnquoted(t *testing.T) {
+ test.That(t, IsURLUnquoted([]byte("http://x")))
+ test.That(t, !IsURLUnquoted([]byte(")")))
+}
+
+func TestHsl2Rgb(t *testing.T) {
+ r, g, b := HSL2RGB(0.0, 1.0, 0.5)
+ test.T(t, r, 1.0)
+ test.T(t, g, 0.0)
+ test.T(t, b, 0.0)
+
+ r, g, b = HSL2RGB(1.0, 1.0, 0.5)
+ test.T(t, r, 1.0)
+ test.T(t, g, 0.0)
+ test.T(t, b, 0.0)
+
+ r, g, b = HSL2RGB(0.66, 0.0, 1.0)
+ test.T(t, r, 1.0)
+ test.T(t, g, 1.0)
+ test.T(t, b, 1.0)
+}