Prerender index page
This commit is contained in: parent fc643483be, commit 6fedb23363
41 changed files with 5442 additions and 118 deletions
1 vendor/github.com/tdewolff/minify/v2/.gitattributes (generated, vendored, Normal file)
@@ -0,0 +1 @@
benchmarks/sample_* linguist-generated=true
4 vendor/github.com/tdewolff/minify/v2/.gitignore (generated, vendored, Normal file)
@@ -0,0 +1,4 @@
dist/
benchmarks/*
!benchmarks/*.go
!benchmarks/sample_*
29 vendor/github.com/tdewolff/minify/v2/.goreleaser.yml (generated, vendored, Normal file)
@@ -0,0 +1,29 @@
builds:
  - binary: minify
    main: ./cmd/minify/
    ldflags: -s -w -X main.Version={{.Version}} -X main.Commit={{.Commit}} -X main.Date={{.Date}}
    env:
      - CGO_ENABLED=0
      - GO111MODULE=on
    goos:
      - linux
      - windows
      - darwin
      - freebsd
      - netbsd
      - openbsd
    goarch:
      - amd64
archive:
  format: tar.gz
  format_overrides:
    - goos: windows
      format: zip
  name_template: "{{.Binary}}_{{.Version}}_{{.Os}}_{{.Arch}}"
  files:
    - README.md
    - LICENSE.md
snapshot:
  name_template: "devel"
release:
  disable: true
5 vendor/github.com/tdewolff/minify/v2/.travis.yml (generated, vendored, Normal file)
@@ -0,0 +1,5 @@
language: go
before_install:
  - go get github.com/mattn/goveralls
script:
  - goveralls -v -service travis-ci -repotoken $COVERALLS_TOKEN -ignore=cmd/minify/* || go test -v ./...
22 vendor/github.com/tdewolff/minify/v2/LICENSE.md (generated, vendored, Normal file)
@@ -0,0 +1,22 @@
Copyright (c) 2015 Taco de Wolff

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
597 vendor/github.com/tdewolff/minify/v2/README.md (generated, vendored, Normal file)
@@ -0,0 +1,597 @@
# Minify <a name="minify"></a> [](https://travis-ci.org/tdewolff/minify) [](http://godoc.org/github.com/tdewolff/minify) [](https://coveralls.io/github/tdewolff/minify?branch=master) [](https://gitter.im/tdewolff/minify?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
|
||||
***BE AWARE: YOU NEED GO VERSION 1.9.7+, 1.10.3+, 1.11.X to run the latest release, otherwise use minify@v2.3.6 and parse@v2.3.4***
|
||||
|
||||
---
|
||||
|
||||
**[Online demo](https://go.tacodewolff.nl/minify) if you need to minify files *now*.**
|
||||
|
||||
**[Command line tool](https://github.com/tdewolff/minify/tree/master/cmd/minify) that minifies concurrently and supports watching file changes.**
|
||||
|
||||
**[All releases](https://github.com/tdewolff/minify/releases) for various platforms.**
|
||||
|
||||
---
|
||||
|
||||
Minify is a minifier package written in [Go][1]. It provides HTML5, CSS3, JS, JSON, SVG and XML minifiers and an interface to implement any other minifier. Minification is the process of removing bytes from a file (such as whitespace) without changing its output and therefore shrinking its size and speeding up transmission over the internet and possibly parsing. The implemented minifiers are designed for high performance.
|
||||
|
||||
The core functionality associates mimetypes with minification functions, allowing embedded resources (like CSS or JS within HTML files) to be minified as well. Users can add new implementations that are triggered based on a mimetype (or pattern), or redirect to an external command (like ClosureCompiler, UglifyCSS, ...).
|
||||
|
||||
#### Table of Contents
|
||||
|
||||
- [Minify](#minify)
|
||||
- [Prologue](#prologue)
|
||||
- [Installation](#installation)
|
||||
- [API stability](#api-stability)
|
||||
- [Testing](#testing)
|
||||
- [Performance](#performance)
|
||||
- [HTML](#html)
|
||||
- [Whitespace removal](#whitespace-removal)
|
||||
- [CSS](#css)
|
||||
- [JS](#js)
|
||||
- [JSON](#json)
|
||||
- [SVG](#svg)
|
||||
- [XML](#xml)
|
||||
- [Usage](#usage)
|
||||
- [New](#new)
|
||||
- [From reader](#from-reader)
|
||||
- [From bytes](#from-bytes)
|
||||
- [From string](#from-string)
|
||||
- [To reader](#to-reader)
|
||||
- [To writer](#to-writer)
|
||||
- [Middleware](#middleware)
|
||||
- [Custom minifier](#custom-minifier)
|
||||
- [Mediatypes](#mediatypes)
|
||||
- [Examples](#examples)
|
||||
- [Common minifiers](#common-minifiers)
|
||||
- [Custom minifier](#custom-minifier-example)
|
||||
- [ResponseWriter](#responsewriter)
|
||||
- [Templates](#templates)
|
||||
- [License](#license)
|
||||
|
||||
### Status
|
||||
|
||||
* CSS: **fully implemented**
|
||||
* HTML: **fully implemented**
|
||||
* JS: improved JSmin implementation
|
||||
* JSON: **fully implemented**
|
||||
* SVG: partially implemented; in development
|
||||
* XML: **fully implemented**
|
||||
|
||||
### Roadmap
|
||||
|
||||
- [ ] General speed-up of all minifiers (use ASM for whitespace funcs)
|
||||
- [ ] Improve JS minifiers by shortening variables and proper semicolon omission
|
||||
- [ ] Speed-up SVG minifier, it is very slow
|
||||
- [x] Proper parser error reporting and line number + column information
|
||||
- [ ] Generation of source maps (uncertain, might slow down parsers too much if it cannot run separately nicely)
|
||||
- [ ] Look into compression of images, fonts and other web resources (into package `compress`)?
|
||||
- [ ] Create a cmd to pack webfiles (much like webpack), ie. merging CSS and JS files, inlining small external files, minification and gzipping. This would work on HTML files.
|
||||
- [ ] Create a package to format files, much like `gofmt` for Go files?
|
||||
|
||||
## Prologue
|
||||
Minifiers or bindings to minifiers exist in almost all programming languages. Some implementations merely use several regular expressions to trim whitespace and comments (even though regexes for parsing HTML/XML are ill-advised; for a good read see [Regular Expressions: Now You Have Two Problems](http://blog.codinghorror.com/regular-expressions-now-you-have-two-problems/)). Some implementations are much more profound, such as the [YUI Compressor](http://yui.github.io/yuicompressor/) and [Google Closure Compiler](https://github.com/google/closure-compiler) for JS. Because most existing implementations use JavaScript or regexes and don't focus on performance, they are pretty slow.
|
||||
|
||||
This minifier proves to be that fast and extensive minifier that can handle HTML and any other filetype it may contain (CSS, JS, ...). It is usually orders of magnitude faster than existing minifiers.
|
||||
|
||||
## Installation
|
||||
Run the following command
|
||||
|
||||
go get -u github.com/tdewolff/minify/v2
|
||||
|
||||
or add the following imports and run the project with `go get`
|
||||
``` go
|
||||
import (
|
||||
"github.com/tdewolff/minify/v2"
|
||||
"github.com/tdewolff/minify/v2/css"
|
||||
"github.com/tdewolff/minify/v2/html"
|
||||
"github.com/tdewolff/minify/v2/js"
|
||||
"github.com/tdewolff/minify/v2/json"
|
||||
"github.com/tdewolff/minify/v2/svg"
|
||||
"github.com/tdewolff/minify/v2/xml"
|
||||
)
|
||||
```
|
||||
|
||||
## API stability
|
||||
There is no guarantee for absolute stability, but I take issues and bugs seriously and don't take API changes lightly. The library will be maintained in a compatible way unless vital bugs prevent me from doing so. There has been one API change after v1 which added options support and I took the opportunity to push through some more API clean up as well. There are no plans whatsoever for future API changes.
|
||||
|
||||
## Testing
|
||||
For all subpackages and the imported `parse` and `buffer` packages, test coverage of 100% is pursued. Besides full coverage, the minifiers are [fuzz tested](https://github.com/tdewolff/fuzz) using [github.com/dvyukov/go-fuzz](http://www.github.com/dvyukov/go-fuzz); see [the wiki](https://github.com/tdewolff/minify/wiki) for the most important bugs found by fuzz testing. Furthermore, I am working on adding visual testing to ensure that minification doesn't change anything visually. By using the WebKit browser to render the original and minified pages we can check whether any pixel is different.
|
||||
|
||||
These tests ensure that everything works as intended, the code does not crash (whatever the input) and that it doesn't change the final result visually. If you still encounter a bug, please report [here](https://github.com/tdewolff/minify/issues)!
|
||||
|
||||
## Performance
|
||||
The benchmarks directory contains a number of standardized samples used to compare performance between changes. To give an indication of the speed of this library, I've ran the tests on my Thinkpad T460 (i5-6300U quad-core 2.4GHz running Arch Linux) using Go 1.9.2.
|
||||
|
||||
```
|
||||
name time/op
|
||||
CSS/sample_bootstrap.css-4 2.26ms ± 0%
|
||||
CSS/sample_gumby.css-4 2.92ms ± 1%
|
||||
HTML/sample_amazon.html-4 2.33ms ± 2%
|
||||
HTML/sample_bbc.html-4 1.02ms ± 1%
|
||||
HTML/sample_blogpost.html-4 171µs ± 2%
|
||||
HTML/sample_es6.html-4 14.5ms ± 0%
|
||||
HTML/sample_stackoverflow.html-4 2.41ms ± 1%
|
||||
HTML/sample_wikipedia.html-4 4.76ms ± 0%
|
||||
JS/sample_ace.js-4 7.41ms ± 0%
|
||||
JS/sample_dot.js-4 63.7µs ± 0%
|
||||
JS/sample_jquery.js-4 2.99ms ± 0%
|
||||
JS/sample_jqueryui.js-4 5.92ms ± 2%
|
||||
JS/sample_moment.js-4 1.09ms ± 1%
|
||||
JSON/sample_large.json-4 2.95ms ± 0%
|
||||
JSON/sample_testsuite.json-4 1.51ms ± 1%
|
||||
JSON/sample_twitter.json-4 6.75µs ± 1%
|
||||
SVG/sample_arctic.svg-4 62.3ms ± 1%
|
||||
SVG/sample_gopher.svg-4 218µs ± 0%
|
||||
SVG/sample_usa.svg-4 33.1ms ± 3%
|
||||
XML/sample_books.xml-4 36.2µs ± 0%
|
||||
XML/sample_catalog.xml-4 14.9µs ± 0%
|
||||
XML/sample_omg.xml-4 6.31ms ± 1%
|
||||
|
||||
name speed
|
||||
CSS/sample_bootstrap.css-4 60.8MB/s ± 0%
|
||||
CSS/sample_gumby.css-4 63.9MB/s ± 1%
|
||||
HTML/sample_amazon.html-4 203MB/s ± 2%
|
||||
HTML/sample_bbc.html-4 113MB/s ± 1%
|
||||
HTML/sample_blogpost.html-4 123MB/s ± 2%
|
||||
HTML/sample_es6.html-4 70.7MB/s ± 0%
|
||||
HTML/sample_stackoverflow.html-4 85.2MB/s ± 1%
|
||||
HTML/sample_wikipedia.html-4 93.6MB/s ± 0%
|
||||
JS/sample_ace.js-4 86.9MB/s ± 0%
|
||||
JS/sample_dot.js-4 81.0MB/s ± 0%
|
||||
JS/sample_jquery.js-4 82.8MB/s ± 0%
|
||||
JS/sample_jqueryui.js-4 79.3MB/s ± 2%
|
||||
JS/sample_moment.js-4 91.2MB/s ± 1%
|
||||
JSON/sample_large.json-4 258MB/s ± 0%
|
||||
JSON/sample_testsuite.json-4 457MB/s ± 1%
|
||||
JSON/sample_twitter.json-4 226MB/s ± 1%
|
||||
SVG/sample_arctic.svg-4 23.6MB/s ± 1%
|
||||
SVG/sample_gopher.svg-4 26.7MB/s ± 0%
|
||||
SVG/sample_usa.svg-4 30.9MB/s ± 3%
|
||||
XML/sample_books.xml-4 122MB/s ± 0%
|
||||
XML/sample_catalog.xml-4 130MB/s ± 0%
|
||||
XML/sample_omg.xml-4 180MB/s ± 1%
|
||||
```
|
||||
|
||||
## HTML
|
||||
|
||||
HTML (with JS and CSS) minification typically shaves off about 10%.
|
||||
|
||||
The HTML5 minifier uses these minifications:
|
||||
|
||||
- strip unnecessary whitespace and otherwise collapse it to one space (or newline if it originally contained a newline)
|
||||
- strip superfluous quotes, or uses single/double quotes whichever requires fewer escapes
|
||||
- strip default attribute values and attribute boolean values
|
||||
- strip some empty attributes
|
||||
- strip unrequired tags (`html`, `head`, `body`, ...)
|
||||
- strip unrequired end tags (`tr`, `td`, `li`, ... and often `p`)
|
||||
- strip default protocols (`http:`, `https:` and `javascript:`)
|
||||
- strip all comments (including conditional comments, old IE versions are not supported anymore by Microsoft)
|
||||
- shorten `doctype` and `meta` charset
|
||||
- lowercase tags, attributes and some values to enhance gzip compression
|
||||
|
||||
Options:
|
||||
|
||||
- `KeepConditionalComments` preserve all IE conditional comments such as `<!--[if IE 6]><![endif]-->` and `<![if IE 6]><![endif]>`, see https://msdn.microsoft.com/en-us/library/ms537512(v=vs.85).aspx#syntax
|
||||
- `KeepDefaultAttrVals` preserve default attribute values such as `<script type="application/javascript">`
|
||||
- `KeepDocumentTags` preserve `html`, `head` and `body` tags
|
||||
- `KeepEndTags` preserve all end tags
|
||||
- `KeepWhitespace` preserve whitespace between inline tags but still collapse multiple whitespace characters into one
|
||||
|
||||
After recent benchmarking and profiling it became really fast and minifies pages in the 10ms range, making it viable for on-the-fly minification.
|
||||
|
||||
However, be careful when doing on-the-fly minification. Minification typically trims off about 10% and does this at worst at around 20MB/s, which means it saves roughly 2MB of transfer per second spent minifying. Users therefore have to download slower than 2MB/s for on-the-fly minification to be worthwhile. This may or may not apply in your situation. Rather use caching!
|
||||
|
||||
### Whitespace removal
|
||||
The whitespace removal mechanism collapses all sequences of whitespace (spaces, newlines, tabs) to a single space. If the sequence contained a newline or carriage return it will collapse into a newline character instead. It trims all text parts (in between tags) depending on whether it was preceded by a space from a previous piece of text and whether it is followed up by a block element or an inline element. In the former case we can omit spaces while for inline elements whitespace has significance.
|
||||
|
||||
Make sure your HTML doesn't depend on whitespace between `block` elements that have been changed to `inline` or `inline-block` elements using CSS. Your layout *should not* depend on that whitespace, as the minifier will remove it. An example is a menu consisting of multiple `<li>` elements that have `display:inline-block` applied and have whitespace in between them. It is bad practice to rely on whitespace for element positioning anyway!
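If a layout does rely on that whitespace, one mitigation is the `KeepWhitespace` option listed under [HTML](#html) above. A minimal sketch (exact output not verified here, shown only to illustrate the option):

``` go
// Sketch: register the HTML minifier with KeepWhitespace so whitespace
// between inline tags is collapsed to a single space rather than removed.
m := minify.New()
m.Add("text/html", &html.Minifier{
	KeepWhitespace: true,
})
minified, err := m.String("text/html", "<li>Home</li> <li>About</li>")
if err != nil {
	panic(err)
}
fmt.Println(minified)
```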
|
||||
|
||||
## CSS
|
||||
|
||||
Minification typically shaves off about 10%-15%. This CSS minifier will _not_ make structural changes to your stylesheets. Although this could result in smaller files, the complexity is quite high and the risk of breaking websites is high too.
|
||||
|
||||
The CSS minifier will only use safe minifications:
|
||||
|
||||
- remove comments and unnecessary whitespace (but keep `/*! ... */` which usually contains the license)
|
||||
- remove trailing semicolons
|
||||
- optimize `margin`, `padding` and `border-width` number of sides
|
||||
- shorten numbers by removing unnecessary `+` and zeros and rewriting with/without exponent
|
||||
- remove dimension and percentage for zero values
|
||||
- remove quotes for URLs
|
||||
- remove quotes for font families and make lowercase
|
||||
- rewrite hex colors to/from color names, or to three digit hex
|
||||
- rewrite `rgb(`, `rgba(`, `hsl(` and `hsla(` colors to hex or name
|
||||
- use four digit hex for alpha values (`transparent` → `#0000`)
|
||||
- replace `normal` and `bold` by numbers for `font-weight` and `font`
|
||||
- replace `none` → `0` for `border`, `background` and `outline`
|
||||
- lowercase all identifiers except classes, IDs and URLs to enhance gzip compression
|
||||
- shorten MS alpha function
|
||||
- rewrite data URIs with base64 or ASCII whichever is shorter
|
||||
- calls minifier for data URI mediatypes, thus you can compress embedded SVG files if you have that minifier attached
|
||||
- shorten aggregate declarations such as `background` and `font`
|
||||
|
||||
It does purposely not use the following techniques:
|
||||
|
||||
- (partially) merge rulesets
|
||||
- (partially) split rulesets
|
||||
- collapse multiple declarations when main declaration is defined within a ruleset (don't put `font-weight` within an already existing `font`, too complex)
|
||||
- remove overwritten properties in ruleset (this not always overwrites it, for example with `!important`)
|
||||
- rewrite properties into one ruleset if possible (like `margin-top`, `margin-right`, `margin-bottom` and `margin-left` → `margin`)
|
||||
- put nested ID selector at the front (`body > div#elem p` → `#elem p`)
|
||||
- rewrite attribute selectors for IDs and classes (`div[id=a]` → `div#a`)
|
||||
- put space after pseudo-selectors (IE6 is old, move on!)
|
||||
|
||||
There are a couple of comparison tables online, such as [CSS Minifier Comparison](http://www.codenothing.com/benchmarks/css-compressor-3.0/full.html), [CSS minifiers comparison](http://www.phpied.com/css-minifiers-comparison/) and [CleanCSS tests](http://goalsmashers.github.io/css-minification-benchmark/). Comparing speed between each, this minifier will usually be between 10x-300x faster than existing implementations, and even rank among the top for minification ratios. It falls short with the purposely not implemented and often unsafe techniques.
|
||||
|
||||
Options:
|
||||
|
||||
- `Decimals` number of decimals to preserve for numbers, `-1` means no trimming
|
||||
- `KeepCSS2` prohibits using CSS3 syntax (such as exponents in numbers, or `rgba(` → `rgb(`), might be incomplete
|
||||
|
||||
## JS
|
||||
|
||||
The JS minifier is pretty basic. It removes comments, whitespace and line breaks whenever it can. It employs all the rules that [JSMin](http://www.crockford.com/javascript/jsmin.html) does too, but has additional improvements. For example the prefix-postfix bug is fixed.
|
||||
|
||||
Common speeds of PHP and JS implementations are about 100-300kB/s (see [Uglify2](http://lisperator.net/uglifyjs/), [Adventures in PHP web asset minimization](https://www.happyassassin.net/2014/12/29/adventures-in-php-web-asset-minimization/)). This implementation is orders of magnitude faster, at around ~80MB/s.
|
||||
|
||||
TODO:
|
||||
- shorten local variables / function parameters names
|
||||
- precise semicolon and newline omission
|
||||
|
||||
## JSON
|
||||
|
||||
Minification typically shaves off about 15% of filesize for common indented JSON such as generated by [JSON Generator](http://www.json-generator.com/).
|
||||
|
||||
The JSON minifier only removes whitespace, which is the only thing that can be left out.
|
||||
|
||||
## SVG
|
||||
|
||||
The SVG minifier uses these minifications:
|
||||
|
||||
- trim and collapse whitespace between all tags
|
||||
- strip comments, empty `doctype`, XML prelude, `metadata`
|
||||
- strip SVG version
|
||||
- strip CDATA sections wherever possible
|
||||
- collapse tags with no content to a void tag
|
||||
- minify style tag and attributes with the CSS minifier
|
||||
- minify colors
|
||||
- shorten lengths and numbers and remove default `px` unit
|
||||
- shorten `path` data
|
||||
- convert `rect`, `line`, `polygon`, `polyline` to `path`
|
||||
- use relative or absolute positions in path data whichever is shorter
|
||||
|
||||
TODO:
|
||||
- convert attributes to style attribute whenever shorter
|
||||
- merge path data? (same style and no intersection -- the latter is difficult)
|
||||
|
||||
Options:
|
||||
|
||||
- `Decimals` number of decimals to preserve for numbers, `-1` means no trimming
|
||||
|
||||
## XML
|
||||
|
||||
The XML minifier uses these minifications:
|
||||
|
||||
- strip unnecessary whitespace and otherwise collapse it to one space (or newline if it originally contained a newline)
|
||||
- strip comments
|
||||
- collapse tags with no content to a void tag
|
||||
- strip CDATA sections wherever possible
|
||||
|
||||
Options:
|
||||
|
||||
- `KeepWhitespace` preserve whitespace between inline tags but still collapse multiple whitespace characters into one
|
||||
|
||||
## Usage
|
||||
Any input stream is buffered by the minification functions. This is how the underlying buffer package inherently works to ensure high performance. The output stream however is not buffered. It is wise to preallocate a buffer as big as the input to which the output is written, or otherwise use `bufio` to buffer writes to a streaming writer, as sketched below.
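For example, a minimal sketch of buffering the output side with the standard `bufio` package (the file names here are hypothetical):

``` go
package main

import (
	"bufio"
	"os"

	"github.com/tdewolff/minify/v2"
	"github.com/tdewolff/minify/v2/html"
)

func main() {
	m := minify.New()
	m.AddFunc("text/html", html.Minify)

	in, err := os.Open("index.html") // hypothetical input file
	if err != nil {
		panic(err)
	}
	defer in.Close()

	out, err := os.Create("index.min.html") // hypothetical output file
	if err != nil {
		panic(err)
	}
	defer out.Close()

	// the output stream is not buffered by the minifier, so wrap it in bufio
	w := bufio.NewWriter(out)
	if err := m.Minify("text/html", w, in); err != nil {
		panic(err)
	}
	if err := w.Flush(); err != nil {
		panic(err)
	}
}
```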
|
||||
|
||||
### New
|
||||
Retrieve a minifier struct which holds a map of mediatype → minifier functions.
|
||||
``` go
|
||||
m := minify.New()
|
||||
```
|
||||
|
||||
The following loads all provided minifiers.
|
||||
``` go
|
||||
m := minify.New()
|
||||
m.AddFunc("text/css", css.Minify)
|
||||
m.AddFunc("text/html", html.Minify)
|
||||
m.AddFunc("image/svg+xml", svg.Minify)
|
||||
m.AddFuncRegexp(regexp.MustCompile("^(application|text)/(x-)?(java|ecma)script$"), js.Minify)
|
||||
m.AddFuncRegexp(regexp.MustCompile("[/+]json$"), json.Minify)
|
||||
m.AddFuncRegexp(regexp.MustCompile("[/+]xml$"), xml.Minify)
|
||||
```
|
||||
|
||||
You can set options to several minifiers.
|
||||
``` go
|
||||
m.Add("text/html", &html.Minifier{
|
||||
KeepDefaultAttrVals: true,
|
||||
KeepWhitespace: true,
|
||||
})
|
||||
```
|
||||
|
||||
### From reader
|
||||
Minify from an `io.Reader` to an `io.Writer` for a specific mediatype.
|
||||
``` go
|
||||
if err := m.Minify(mediatype, w, r); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
```
|
||||
|
||||
### From bytes
|
||||
Minify from and to a `[]byte` for a specific mediatype.
|
||||
``` go
|
||||
b, err = m.Bytes(mediatype, b)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
```
|
||||
|
||||
### From string
|
||||
Minify from and to a `string` for a specific mediatype.
|
||||
``` go
|
||||
s, err = m.String(mediatype, s)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
```
|
||||
|
||||
### To reader
|
||||
Get a minifying reader for a specific mediatype.
|
||||
``` go
|
||||
mr := m.Reader(mediatype, r)
|
||||
if _, err := mr.Read(b); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
```
|
||||
|
||||
### To writer
|
||||
Get a minifying writer for a specific mediatype. Must be explicitly closed because it uses an `io.Pipe` underneath.
|
||||
``` go
|
||||
mw := m.Writer(mediatype, w)
|
||||
if _, err := mw.Write([]byte("input")); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
if err := mw.Close(); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
```
|
||||
|
||||
### Middleware
|
||||
Minify resources on the fly using middleware. It passes a wrapped response writer to the handler that removes the Content-Length header. The minifier is chosen based on the Content-Type header or, if the header is empty, by the request URI file extension. This is on-the-fly processing, you should preferably cache the results though!
|
||||
``` go
|
||||
fs := http.FileServer(http.Dir("www/"))
|
||||
http.Handle("/", m.Middleware(fs))
|
||||
```
|
||||
|
||||
### Custom minifier
|
||||
Add a minifier for a specific mimetype.
|
||||
``` go
|
||||
type CustomMinifier struct {
|
||||
KeepLineBreaks bool
|
||||
}
|
||||
|
||||
func (c *CustomMinifier) Minify(m *minify.M, w io.Writer, r io.Reader, params map[string]string) error {
|
||||
// ...
|
||||
return nil
|
||||
}
|
||||
|
||||
m.Add(mimetype, &CustomMinifier{KeepLineBreaks: true})
|
||||
// or
|
||||
m.AddRegexp(regexp.MustCompile("/x-custom$"), &CustomMinifier{KeepLineBreaks: true})
|
||||
```
|
||||
|
||||
Add a minify function for a specific mimetype.
|
||||
``` go
|
||||
m.AddFunc(mimetype, func(m *minify.M, w io.Writer, r io.Reader, params map[string]string) error {
|
||||
// ...
|
||||
return nil
|
||||
})
|
||||
m.AddFuncRegexp(regexp.MustCompile("/x-custom$"), func(m *minify.M, w io.Writer, r io.Reader, params map[string]string) error {
|
||||
// ...
|
||||
return nil
|
||||
})
|
||||
```
|
||||
|
||||
Add a command `cmd` with arguments `args` for a specific mimetype.
|
||||
``` go
|
||||
m.AddCmd(mimetype, exec.Command(cmd, args...))
|
||||
m.AddCmdRegexp(regexp.MustCompile("/x-custom$"), exec.Command(cmd, args...))
|
||||
```
|
||||
|
||||
### Mediatypes
|
||||
Using the `params map[string]string` argument one can pass parameters to the minifier such as seen in mediatypes (`type/subtype; key1=val2; key2=val2`). Examples are the encoding or charset of the data. Calling `Minify` will split the mimetype and parameters for the minifiers for you, but `MinifyMimetype` can be used if you already have them split up.
|
||||
|
||||
Minifiers can also be added using a regular expression. For example a minifier with `image/.*` will match any image mime.
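As a hedged sketch combining both points (a hypothetical pass-through minifier registered for all image mediatypes that merely inspects a parameter):

``` go
// Sketch only: the params map holds mediatype parameters split off by Minify;
// this fake minifier just copies its input through unchanged.
m.AddFuncRegexp(regexp.MustCompile("^image/"), func(m *minify.M, w io.Writer, r io.Reader, params map[string]string) error {
	_ = params["charset"] // e.g. set for "image/svg+xml; charset=utf-8"
	_, err := io.Copy(w, r)
	return err
})
```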
|
||||
|
||||
## Examples
|
||||
### Common minifiers
|
||||
Basic example that minifies from stdin to stdout and loads the default HTML, CSS and JS minifiers. Optionally, one can enable `java -jar build/compiler.jar` to run for JS (for example the [ClosureCompiler](https://code.google.com/p/closure-compiler/)). Note that reading the file into a buffer first and writing to a pre-allocated buffer would be faster (but would disable streaming).
|
||||
``` go
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
|
||||
"github.com/tdewolff/minify/v2"
|
||||
"github.com/tdewolff/minify/v2/css"
|
||||
"github.com/tdewolff/minify/v2/html"
|
||||
"github.com/tdewolff/minify/v2/js"
|
||||
"github.com/tdewolff/minify/v2/json"
|
||||
"github.com/tdewolff/minify/v2/svg"
|
||||
"github.com/tdewolff/minify/v2/xml"
|
||||
)
|
||||
|
||||
func main() {
|
||||
m := minify.New()
|
||||
m.AddFunc("text/css", css.Minify)
|
||||
m.AddFunc("text/html", html.Minify)
|
||||
m.AddFunc("image/svg+xml", svg.Minify)
|
||||
m.AddFuncRegexp(regexp.MustCompile("^(application|text)/(x-)?(java|ecma)script$"), js.Minify)
|
||||
m.AddFuncRegexp(regexp.MustCompile("[/+]json$"), json.Minify)
|
||||
m.AddFuncRegexp(regexp.MustCompile("[/+]xml$"), xml.Minify)
|
||||
|
||||
// Or use the following for better minification of JS but lower speed:
|
||||
// m.AddCmdRegexp(regexp.MustCompile("^(application|text)/(x-)?(java|ecma)script$"), exec.Command("java", "-jar", "build/compiler.jar"))
|
||||
|
||||
if err := m.Minify("text/html", os.Stdout, os.Stdin); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### <a name="custom-minifier-example"></a> Custom minifier
|
||||
Custom minifier showing an example that implements the minifier function interface. Within a custom minifier, it is possible to call any minifier function (through `m *minify.M`) recursively when dealing with embedded resources.
|
||||
``` go
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"strings"
|
||||
|
||||
"github.com/tdewolff/minify/v2"
|
||||
)
|
||||
|
||||
func main() {
|
||||
m := minify.New()
|
||||
m.AddFunc("text/plain", func(m *minify.M, w io.Writer, r io.Reader, _ map[string]string) error {
|
||||
// remove newlines and spaces
|
||||
rb := bufio.NewReader(r)
|
||||
for {
|
||||
line, err := rb.ReadString('\n')
|
||||
if err != nil && err != io.EOF {
|
||||
return err
|
||||
}
|
||||
if _, errws := io.WriteString(w, strings.Replace(line, " ", "", -1)); errws != nil {
|
||||
return errws
|
||||
}
|
||||
if err == io.EOF {
|
||||
break
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
in := "Because my coffee was too cold, I heated it in the microwave."
|
||||
out, err := m.String("text/plain", in)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
fmt.Println(out)
|
||||
// Output: Becausemycoffeewastoocold,Iheateditinthemicrowave.
|
||||
}
|
||||
```
|
||||
|
||||
### ResponseWriter
|
||||
#### Middleware
|
||||
``` go
|
||||
func main() {
|
||||
m := minify.New()
|
||||
m.AddFunc("text/css", css.Minify)
|
||||
m.AddFunc("text/html", html.Minify)
|
||||
m.AddFunc("image/svg+xml", svg.Minify)
|
||||
m.AddFuncRegexp(regexp.MustCompile("^(application|text)/(x-)?(java|ecma)script$"), js.Minify)
|
||||
m.AddFuncRegexp(regexp.MustCompile("[/+]json$"), json.Minify)
|
||||
m.AddFuncRegexp(regexp.MustCompile("[/+]xml$"), xml.Minify)
|
||||
|
||||
fs := http.FileServer(http.Dir("www/"))
|
||||
http.Handle("/", m.Middleware(fs))
|
||||
}
|
||||
```
|
||||
|
||||
#### ResponseWriter
|
||||
``` go
|
||||
func Serve(w http.ResponseWriter, r *http.Request) {
|
||||
mw := m.ResponseWriter(w, r)
|
||||
defer mw.Close()
|
||||
w = mw
|
||||
|
||||
http.ServeFile(w, r, path.Join("www", r.URL.Path))
|
||||
}
|
||||
```
|
||||
|
||||
#### Custom response writer
|
||||
ResponseWriter example which returns a ResponseWriter that minifies the content and then writes to the original ResponseWriter. Any write after applying this filter will be minified.
|
||||
``` go
|
||||
type MinifyResponseWriter struct {
|
||||
http.ResponseWriter
|
||||
io.WriteCloser
|
||||
}
|
||||
|
||||
func (m MinifyResponseWriter) Write(b []byte) (int, error) {
|
||||
return m.WriteCloser.Write(b)
|
||||
}
|
||||
|
||||
// MinifyResponseWriter must be closed explicitly by calling site.
|
||||
func MinifyFilter(mediatype string, res http.ResponseWriter) MinifyResponseWriter {
|
||||
m := minify.New()
|
||||
// add minfiers
|
||||
|
||||
mw := m.Writer(mediatype, res)
|
||||
return MinifyResponseWriter{res, mw}
|
||||
}
|
||||
```
|
||||
|
||||
``` go
|
||||
// Usage
|
||||
func(w http.ResponseWriter, req *http.Request) {
|
||||
w = MinifyFilter("text/html", w)
|
||||
if _, err := io.WriteString(w, "<p class=\"message\"> This HTTP response will be minified. </p>"); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
if err := w.Close(); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
// Output: <p class=message>This HTTP response will be minified.
|
||||
}
|
||||
```
|
||||
|
||||
### Templates
|
||||
|
||||
Here's an example of a replacement for `template.ParseFiles` from `template/html`, which automatically minifies each template before parsing it.
|
||||
|
||||
Be aware that minifying templates will work in most cases but not all. Because the HTML minifier only works for valid HTML5, your template must itself be valid HTML5. Template tags are parsed as regular text by the minifier.
|
||||
|
||||
``` go
|
||||
func compileTemplates(filenames ...string) (*template.Template, error) {
|
||||
m := minify.New()
|
||||
m.AddFunc("text/html", html.Minify)
|
||||
|
||||
var tmpl *template.Template
|
||||
for _, filename := range filenames {
|
||||
name := filepath.Base(filename)
|
||||
if tmpl == nil {
|
||||
tmpl = template.New(name)
|
||||
} else {
|
||||
tmpl = tmpl.New(name)
|
||||
}
|
||||
|
||||
b, err := ioutil.ReadFile(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
mb, err := m.Bytes("text/html", b)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if _, err := tmpl.Parse(string(mb)); err != nil {
return nil, err
}
|
||||
}
|
||||
return tmpl, nil
|
||||
}
|
||||
```
|
||||
|
||||
Example usage:
|
||||
|
||||
``` go
|
||||
templates := template.Must(compileTemplates("view.html", "home.html"))
|
||||
```
|
||||
|
||||
## License
|
||||
Released under the [MIT license](LICENSE.md).
|
||||
|
||||
[1]: http://golang.org/ "Go Language"
|
462 vendor/github.com/tdewolff/minify/v2/common.go (generated, vendored, Normal file)
@@ -0,0 +1,462 @@
package minify // import "github.com/tdewolff/minify"
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"net/url"
|
||||
|
||||
"github.com/tdewolff/parse/v2"
|
||||
"github.com/tdewolff/parse/v2/strconv"
|
||||
)
|
||||
|
||||
// Epsilon is the closest number to zero that is not considered to be zero.
|
||||
var Epsilon = 0.00001
|
||||
|
||||
// Mediatype minifies a given mediatype by removing all whitespace.
|
||||
func Mediatype(b []byte) []byte {
|
||||
j := 0
|
||||
start := 0
|
||||
inString := false
|
||||
for i, c := range b {
|
||||
if !inString && parse.IsWhitespace(c) {
|
||||
if start != 0 {
|
||||
j += copy(b[j:], b[start:i])
|
||||
} else {
|
||||
j += i
|
||||
}
|
||||
start = i + 1
|
||||
} else if c == '"' {
|
||||
inString = !inString
|
||||
}
|
||||
}
|
||||
if start != 0 {
|
||||
j += copy(b[j:], b[start:])
|
||||
return parse.ToLower(b[:j])
|
||||
}
|
||||
return parse.ToLower(b)
|
||||
}
|
||||
|
||||
// DataURI minifies a data URI and calls a minifier by the specified mediatype. Specifications: https://www.ietf.org/rfc/rfc2397.txt.
|
||||
func DataURI(m *M, dataURI []byte) []byte {
|
||||
if mediatype, data, err := parse.DataURI(dataURI); err == nil {
|
||||
dataURI, _ = m.Bytes(string(mediatype), data)
|
||||
base64Len := len(";base64") + base64.StdEncoding.EncodedLen(len(dataURI))
|
||||
asciiLen := len(dataURI)
|
||||
for _, c := range dataURI {
|
||||
if 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' || c == '-' || c == '_' || c == '.' || c == '~' || c == ' ' {
|
||||
asciiLen++
|
||||
} else {
|
||||
asciiLen += 2
|
||||
}
|
||||
if asciiLen > base64Len {
|
||||
break
|
||||
}
|
||||
}
|
||||
if asciiLen > base64Len {
|
||||
encoded := make([]byte, base64Len-len(";base64"))
|
||||
base64.StdEncoding.Encode(encoded, dataURI)
|
||||
dataURI = encoded
|
||||
mediatype = append(mediatype, []byte(";base64")...)
|
||||
} else {
|
||||
dataURI = []byte(url.QueryEscape(string(dataURI)))
|
||||
dataURI = bytes.Replace(dataURI, []byte("\""), []byte("\\\""), -1)
|
||||
}
|
||||
if len("text/plain") <= len(mediatype) && parse.EqualFold(mediatype[:len("text/plain")], []byte("text/plain")) {
|
||||
mediatype = mediatype[len("text/plain"):]
|
||||
}
|
||||
for i := 0; i+len(";charset=us-ascii") <= len(mediatype); i++ {
|
||||
// must start with semicolon and be followed by end of mediatype or semicolon
|
||||
if mediatype[i] == ';' && parse.EqualFold(mediatype[i+1:i+len(";charset=us-ascii")], []byte("charset=us-ascii")) && (i+len(";charset=us-ascii") >= len(mediatype) || mediatype[i+len(";charset=us-ascii")] == ';') {
|
||||
mediatype = append(mediatype[:i], mediatype[i+len(";charset=us-ascii"):]...)
|
||||
break
|
||||
}
|
||||
}
|
||||
dataURI = append(append(append([]byte("data:"), mediatype...), ','), dataURI...)
|
||||
}
|
||||
return dataURI
|
||||
}
|
||||
|
||||
const MaxInt = int(^uint(0) >> 1)
|
||||
const MinInt = -MaxInt - 1
|
||||
|
||||
// Decimal minifies a given byte slice containing a number (see parse.Number) and removes superfluous characters.
|
||||
// It does not parse or output exponents.
|
||||
func Decimal(num []byte, prec int) []byte {
|
||||
// omit first + and register mantissa start and end, whether it's negative and the exponent
|
||||
neg := false
|
||||
start := 0
|
||||
dot := -1
|
||||
end := len(num)
|
||||
if 0 < end && (num[0] == '+' || num[0] == '-') {
|
||||
if num[0] == '-' {
|
||||
neg = true
|
||||
}
|
||||
start++
|
||||
}
|
||||
for i, c := range num[start:] {
|
||||
if c == '.' {
|
||||
dot = start + i
|
||||
break
|
||||
}
|
||||
}
|
||||
if dot == -1 {
|
||||
dot = end
|
||||
}
|
||||
|
||||
// trim leading zeros but leave at least one digit
|
||||
for start < end-1 && num[start] == '0' {
|
||||
start++
|
||||
}
|
||||
// trim trailing zeros
|
||||
i := end - 1
|
||||
for ; i > dot; i-- {
|
||||
if num[i] != '0' {
|
||||
end = i + 1
|
||||
break
|
||||
}
|
||||
}
|
||||
if i == dot {
|
||||
end = dot
|
||||
if start == end {
|
||||
num[start] = '0'
|
||||
return num[start : start+1]
|
||||
}
|
||||
} else if start == end-1 && num[start] == '0' {
|
||||
return num[start:end]
|
||||
}
|
||||
|
||||
// apply precision
|
||||
if prec > -1 && dot+1+prec < end {
|
||||
end = dot + 1 + prec
|
||||
inc := num[end] >= '5'
|
||||
if inc || num[end-1] == '0' {
|
||||
for i := end - 1; i > start; i-- {
|
||||
if i == dot {
|
||||
end--
|
||||
} else if inc {
|
||||
if num[i] == '9' {
|
||||
if i > dot {
|
||||
end--
|
||||
} else {
|
||||
num[i] = '0'
|
||||
}
|
||||
} else {
|
||||
num[i]++
|
||||
inc = false
|
||||
break
|
||||
}
|
||||
} else if i > dot && num[i] == '0' {
|
||||
end--
|
||||
}
|
||||
}
|
||||
}
|
||||
if dot == start && end == start+1 {
|
||||
if inc {
|
||||
num[start] = '1'
|
||||
} else {
|
||||
num[start] = '0'
|
||||
}
|
||||
} else {
|
||||
if dot+1 == end {
|
||||
end--
|
||||
}
|
||||
if inc {
|
||||
if num[start] == '9' {
|
||||
num[start] = '0'
|
||||
copy(num[start+1:], num[start:end])
|
||||
end++
|
||||
num[start] = '1'
|
||||
} else {
|
||||
num[start]++
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if neg {
|
||||
start--
|
||||
num[start] = '-'
|
||||
}
|
||||
return num[start:end]
|
||||
}
|
||||
|
||||
// Number minifies a given byte slice containing a number (see parse.Number) and removes superfluous characters.
|
||||
func Number(num []byte, prec int) []byte {
|
||||
// omit first + and register mantissa start and end, whether it's negative and the exponent
|
||||
neg := false
|
||||
start := 0
|
||||
dot := -1
|
||||
end := len(num)
|
||||
origExp := 0
|
||||
if 0 < end && (num[0] == '+' || num[0] == '-') {
|
||||
if num[0] == '-' {
|
||||
neg = true
|
||||
}
|
||||
start++
|
||||
}
|
||||
for i, c := range num[start:] {
|
||||
if c == '.' {
|
||||
dot = start + i
|
||||
} else if c == 'e' || c == 'E' {
|
||||
end = start + i
|
||||
i += start + 1
|
||||
if i < len(num) && num[i] == '+' {
|
||||
i++
|
||||
}
|
||||
if tmpOrigExp, n := strconv.ParseInt(num[i:]); n > 0 && tmpOrigExp >= int64(MinInt) && tmpOrigExp <= int64(MaxInt) {
|
||||
// range checks for when int is 32 bit
|
||||
origExp = int(tmpOrigExp)
|
||||
} else {
|
||||
return num
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
if dot == -1 {
|
||||
dot = end
|
||||
}
|
||||
|
||||
// trim leading zeros but leave at least one digit
|
||||
for start < end-1 && num[start] == '0' {
|
||||
start++
|
||||
}
|
||||
// trim trailing zeros
|
||||
i := end - 1
|
||||
for ; i > dot; i-- {
|
||||
if num[i] != '0' {
|
||||
end = i + 1
|
||||
break
|
||||
}
|
||||
}
|
||||
if i == dot {
|
||||
end = dot
|
||||
if start == end {
|
||||
num[start] = '0'
|
||||
return num[start : start+1]
|
||||
}
|
||||
} else if start == end-1 && num[start] == '0' {
|
||||
return num[start:end]
|
||||
}
|
||||
|
||||
// n is the number of significant digits
|
||||
// normExp would be the exponent if it were normalised (0.1 <= f < 1)
|
||||
n := 0
|
||||
normExp := 0
|
||||
if dot == start {
|
||||
for i = dot + 1; i < end; i++ {
|
||||
if num[i] != '0' {
|
||||
n = end - i
|
||||
normExp = dot - i + 1
|
||||
break
|
||||
}
|
||||
}
|
||||
} else if dot == end {
|
||||
normExp = end - start
|
||||
for i = end - 1; i >= start; i-- {
|
||||
if num[i] != '0' {
|
||||
n = i + 1 - start
|
||||
end = i + 1
|
||||
break
|
||||
}
|
||||
}
|
||||
} else {
|
||||
n = end - start - 1
|
||||
normExp = dot - start
|
||||
}
|
||||
|
||||
if origExp < 0 && (normExp < MinInt-origExp || normExp-n < MinInt-origExp) || origExp > 0 && (normExp > MaxInt-origExp || normExp-n > MaxInt-origExp) {
|
||||
return num
|
||||
}
|
||||
normExp += origExp
|
||||
|
||||
// intExp would be the exponent if it were an integer
|
||||
intExp := normExp - n
|
||||
lenIntExp := 1
|
||||
if intExp <= -10 || intExp >= 10 {
|
||||
lenIntExp = strconv.LenInt(int64(intExp))
|
||||
}
|
||||
|
||||
// there are three cases to consider when printing the number
|
||||
// case 1: without decimals and with an exponent (large numbers)
|
||||
// case 2: with decimals and without an exponent (around zero)
|
||||
// case 3: without decimals and with a negative exponent (small numbers)
|
||||
if normExp >= n {
|
||||
// case 1
|
||||
if dot < end {
|
||||
if dot == start {
|
||||
start = end - n
|
||||
} else {
|
||||
// TODO: copy the other part if shorter?
|
||||
copy(num[dot:], num[dot+1:end])
|
||||
end--
|
||||
}
|
||||
}
|
||||
if normExp >= n+3 {
|
||||
num[end] = 'e'
|
||||
end++
|
||||
for i := end + lenIntExp - 1; i >= end; i-- {
|
||||
num[i] = byte(intExp%10) + '0'
|
||||
intExp /= 10
|
||||
}
|
||||
end += lenIntExp
|
||||
} else if normExp == n+2 {
|
||||
num[end] = '0'
|
||||
num[end+1] = '0'
|
||||
end += 2
|
||||
} else if normExp == n+1 {
|
||||
num[end] = '0'
|
||||
end++
|
||||
}
|
||||
} else if normExp >= -lenIntExp-1 {
|
||||
// case 2
|
||||
zeroes := -normExp
|
||||
newDot := 0
|
||||
if zeroes > 0 {
|
||||
// dot placed at the front and add zeroes
|
||||
newDot = end - n - zeroes - 1
|
||||
if newDot != dot {
|
||||
d := start - newDot
|
||||
if d > 0 {
|
||||
if dot < end {
|
||||
// copy original digits behind the dot backwards
|
||||
copy(num[dot+1+d:], num[dot+1:end])
|
||||
if dot > start {
|
||||
// copy original digits before the dot backwards
|
||||
copy(num[start+d+1:], num[start:dot])
|
||||
}
|
||||
} else if dot > start {
|
||||
// copy original digits before the dot backwards
|
||||
copy(num[start+d:], num[start:dot])
|
||||
}
|
||||
newDot = start
|
||||
end += d
|
||||
} else {
|
||||
start += -d
|
||||
}
|
||||
num[newDot] = '.'
|
||||
for i := 0; i < zeroes; i++ {
|
||||
num[newDot+1+i] = '0'
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// placed in the middle
|
||||
if dot == start {
|
||||
// TODO: try if placing at the end reduces copying
|
||||
// when there are zeroes after the dot
|
||||
dot = end - n - 1
|
||||
start = dot
|
||||
} else if dot >= end {
|
||||
// TODO: try if placing at the start reduces copying
|
||||
// when input has no dot in it
|
||||
dot = end
|
||||
end++
|
||||
}
|
||||
newDot = start + normExp
|
||||
if newDot > dot {
|
||||
// copy digits forwards
|
||||
copy(num[dot:], num[dot+1:newDot+1])
|
||||
} else if newDot < dot {
|
||||
// copy digits backwards
|
||||
copy(num[newDot+1:], num[newDot:dot])
|
||||
}
|
||||
num[newDot] = '.'
|
||||
}
|
||||
|
||||
// apply precision
|
||||
dot = newDot
|
||||
if prec > -1 && dot+1+prec < end {
|
||||
end = dot + 1 + prec
|
||||
inc := num[end] >= '5'
|
||||
if inc || num[end-1] == '0' {
|
||||
for i := end - 1; i > start; i-- {
|
||||
if i == dot {
|
||||
end--
|
||||
} else if inc {
|
||||
if num[i] == '9' {
|
||||
if i > dot {
|
||||
end--
|
||||
} else {
|
||||
num[i] = '0'
|
||||
}
|
||||
} else {
|
||||
num[i]++
|
||||
inc = false
|
||||
break
|
||||
}
|
||||
} else if i > dot && num[i] == '0' {
|
||||
end--
|
||||
}
|
||||
}
|
||||
}
|
||||
if dot == start && end == start+1 {
|
||||
if inc {
|
||||
num[start] = '1'
|
||||
} else {
|
||||
num[start] = '0'
|
||||
}
|
||||
} else {
|
||||
if dot+1 == end {
|
||||
end--
|
||||
}
|
||||
if inc {
|
||||
if num[start] == '9' {
|
||||
num[start] = '0'
|
||||
copy(num[start+1:], num[start:end])
|
||||
end++
|
||||
num[start] = '1'
|
||||
} else {
|
||||
num[start]++
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// case 3
|
||||
|
||||
// find new end, considering moving numbers to the front, removing the dot and increasing the length of the exponent
|
||||
newEnd := end
|
||||
if dot == start {
|
||||
newEnd = start + n
|
||||
} else {
|
||||
newEnd--
|
||||
}
|
||||
newEnd += 2 + lenIntExp
|
||||
|
||||
exp := intExp
|
||||
lenExp := lenIntExp
|
||||
if newEnd < len(num) {
|
||||
// it saves space to convert the decimal to an integer and decrease the exponent
|
||||
if dot < end {
|
||||
if dot == start {
|
||||
copy(num[start:], num[end-n:end])
|
||||
end = start + n
|
||||
} else {
|
||||
copy(num[dot:], num[dot+1:end])
|
||||
end--
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// it does not save space and will panic, so we revert to the original representation
|
||||
exp = origExp
|
||||
lenExp = 1
|
||||
if origExp <= -10 || origExp >= 10 {
|
||||
lenExp = strconv.LenInt(int64(origExp))
|
||||
}
|
||||
}
|
||||
num[end] = 'e'
|
||||
num[end+1] = '-'
|
||||
end += 2
|
||||
exp = -exp
|
||||
for i := end + lenExp - 1; i >= end; i-- {
|
||||
num[i] = byte(exp%10) + '0'
|
||||
exp /= 10
|
||||
}
|
||||
end += lenExp
|
||||
}
|
||||
|
||||
if neg {
|
||||
start--
|
||||
num[start] = '-'
|
||||
}
|
||||
return num[start:end]
|
||||
}
|
12 vendor/github.com/tdewolff/minify/v2/go.mod (generated, vendored, Normal file)
@@ -0,0 +1,12 @@
module github.com/tdewolff/minify/v2

require (
	github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927 // indirect
	github.com/dustin/go-humanize v1.0.0
	github.com/fsnotify/fsnotify v1.4.7
	github.com/matryer/try v0.0.0-20161228173917-9ac251b645a2
	github.com/spf13/pflag v1.0.3
	github.com/tdewolff/parse/v2 v2.3.5
	github.com/tdewolff/test v1.0.0
	golang.org/x/sys v0.0.0-20181031143558-9b800f95dbbc // indirect
)
16 vendor/github.com/tdewolff/minify/v2/go.sum (generated, vendored, Normal file)
@@ -0,0 +1,16 @@
github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927 h1:SKI1/fuSdodxmNNyVBR8d7X/HuLnRpvvFO0AgyQk764=
github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927/go.mod h1:h/aW8ynjgkuj+NQRlZcDbAbM1ORAbXjXX77sX7T289U=
github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/matryer/try v0.0.0-20161228173917-9ac251b645a2 h1:JAEbJn3j/FrhdWA9jW8B5ajsLIjeuEHLi8xE4fk997o=
github.com/matryer/try v0.0.0-20161228173917-9ac251b645a2/go.mod h1:0KeJpeMD6o+O4hW7qJOT7vyQPKrWmj26uf5wMc/IiIs=
github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/tdewolff/parse/v2 v2.3.5 h1:/uS8JfhwVJsNkEh769GM5ENv6L9LOh2Z9uW3tCdlhs0=
github.com/tdewolff/parse/v2 v2.3.5/go.mod h1:HansaqmN4I/U7L6/tUp0NcwT2tFO0F4EAWYGSDzkYNk=
github.com/tdewolff/test v1.0.0 h1:jOwzqCXr5ePXEPGJaq2ivoR6HOCi+D5TPfpoyg8yvmU=
github.com/tdewolff/test v1.0.0/go.mod h1:DiQUlutnqlEvdvhSn2LPGy4TFwRauAaYDsL+683RNX4=
golang.org/x/sys v0.0.0-20181031143558-9b800f95dbbc h1:SdCq5U4J+PpbSDIl9bM0V1e1Ug1jsnBkAFvTs1htn7U=
golang.org/x/sys v0.0.0-20181031143558-9b800f95dbbc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
131 vendor/github.com/tdewolff/minify/v2/html/buffer.go (generated, vendored, Normal file)
@@ -0,0 +1,131 @@
package html // import "github.com/tdewolff/minify/html"
|
||||
|
||||
import (
|
||||
"github.com/tdewolff/parse/v2"
|
||||
"github.com/tdewolff/parse/v2/html"
|
||||
)
|
||||
|
||||
// Token is a single token unit with an attribute value (if given) and hash of the data.
|
||||
type Token struct {
|
||||
html.TokenType
|
||||
Hash html.Hash
|
||||
Data []byte
|
||||
Text []byte
|
||||
AttrVal []byte
|
||||
Traits traits
|
||||
}
|
||||
|
||||
// TokenBuffer is a buffer that allows for token look-ahead.
|
||||
type TokenBuffer struct {
|
||||
l *html.Lexer
|
||||
|
||||
buf []Token
|
||||
pos int
|
||||
|
||||
attrBuffer []*Token
|
||||
}
|
||||
|
||||
// NewTokenBuffer returns a new TokenBuffer.
|
||||
func NewTokenBuffer(l *html.Lexer) *TokenBuffer {
|
||||
return &TokenBuffer{
|
||||
l: l,
|
||||
buf: make([]Token, 0, 8),
|
||||
}
|
||||
}
|
||||
|
||||
func (z *TokenBuffer) read(t *Token) {
|
||||
t.TokenType, t.Data = z.l.Next()
|
||||
t.Text = z.l.Text()
|
||||
if t.TokenType == html.AttributeToken {
|
||||
t.AttrVal = z.l.AttrVal()
|
||||
if len(t.AttrVal) > 1 && (t.AttrVal[0] == '"' || t.AttrVal[0] == '\'') {
|
||||
t.AttrVal = parse.TrimWhitespace(t.AttrVal[1 : len(t.AttrVal)-1]) // quotes will be readded in attribute loop if necessary
|
||||
}
|
||||
t.Hash = html.ToHash(t.Text)
|
||||
t.Traits = attrMap[t.Hash]
|
||||
} else if t.TokenType == html.StartTagToken || t.TokenType == html.EndTagToken {
|
||||
t.AttrVal = nil
|
||||
t.Hash = html.ToHash(t.Text)
|
||||
t.Traits = tagMap[t.Hash]
|
||||
} else {
|
||||
t.AttrVal = nil
|
||||
t.Hash = 0
|
||||
t.Traits = 0
|
||||
}
|
||||
}
|
||||
|
||||
// Peek returns the ith element and possibly does an allocation.
|
||||
// Peeking past an error will panic.
|
||||
func (z *TokenBuffer) Peek(pos int) *Token {
|
||||
pos += z.pos
|
||||
if pos >= len(z.buf) {
|
||||
if len(z.buf) > 0 && z.buf[len(z.buf)-1].TokenType == html.ErrorToken {
|
||||
return &z.buf[len(z.buf)-1]
|
||||
}
|
||||
|
||||
c := cap(z.buf)
|
||||
d := len(z.buf) - z.pos
|
||||
p := pos - z.pos + 1 // required peek length
|
||||
var buf []Token
|
||||
if 2*p > c {
|
||||
buf = make([]Token, 0, 2*c+p)
|
||||
} else {
|
||||
buf = z.buf
|
||||
}
|
||||
copy(buf[:d], z.buf[z.pos:])
|
||||
|
||||
buf = buf[:p]
|
||||
pos -= z.pos
|
||||
for i := d; i < p; i++ {
|
||||
z.read(&buf[i])
|
||||
if buf[i].TokenType == html.ErrorToken {
|
||||
buf = buf[:i+1]
|
||||
pos = i
|
||||
break
|
||||
}
|
||||
}
|
||||
z.pos, z.buf = 0, buf
|
||||
}
|
||||
return &z.buf[pos]
|
||||
}
|
||||
|
||||
// Shift returns the first element and advances position.
|
||||
func (z *TokenBuffer) Shift() *Token {
|
||||
if z.pos >= len(z.buf) {
|
||||
t := &z.buf[:1][0]
|
||||
z.read(t)
|
||||
return t
|
||||
}
|
||||
t := &z.buf[z.pos]
|
||||
z.pos++
|
||||
return t
|
||||
}
|
||||
|
||||
// Attributes extracts the gives attribute hashes from a tag.
|
||||
// It returns in the same order pointers to the requested token data or nil.
|
||||
func (z *TokenBuffer) Attributes(hashes ...html.Hash) []*Token {
|
||||
n := 0
|
||||
for {
|
||||
if t := z.Peek(n); t.TokenType != html.AttributeToken {
|
||||
break
|
||||
}
|
||||
n++
|
||||
}
|
||||
if len(hashes) > cap(z.attrBuffer) {
|
||||
z.attrBuffer = make([]*Token, len(hashes))
|
||||
} else {
|
||||
z.attrBuffer = z.attrBuffer[:len(hashes)]
|
||||
for i := range z.attrBuffer {
|
||||
z.attrBuffer[i] = nil
|
||||
}
|
||||
}
|
||||
for i := z.pos; i < z.pos+n; i++ {
|
||||
attr := &z.buf[i]
|
||||
for j, hash := range hashes {
|
||||
if hash == attr.Hash {
|
||||
z.attrBuffer[j] = attr
|
||||
}
|
||||
}
|
||||
}
|
||||
return z.attrBuffer
|
||||
}
|
457 vendor/github.com/tdewolff/minify/v2/html/html.go (generated, vendored, Normal file)
@@ -0,0 +1,457 @@
// Package html minifies HTML5 following the specifications at http://www.w3.org/TR/html5/syntax.html.
|
||||
package html // import "github.com/tdewolff/minify/html"
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
|
||||
"github.com/tdewolff/minify/v2"
|
||||
"github.com/tdewolff/parse/v2"
|
||||
"github.com/tdewolff/parse/v2/buffer"
|
||||
"github.com/tdewolff/parse/v2/html"
|
||||
)
|
||||
|
||||
var (
|
||||
gtBytes = []byte(">")
|
||||
isBytes = []byte("=")
|
||||
spaceBytes = []byte(" ")
|
||||
doctypeBytes = []byte("<!doctype html>")
|
||||
jsMimeBytes = []byte("application/javascript")
|
||||
cssMimeBytes = []byte("text/css")
|
||||
htmlMimeBytes = []byte("text/html")
|
||||
svgMimeBytes = []byte("image/svg+xml")
|
||||
mathMimeBytes = []byte("application/mathml+xml")
|
||||
dataSchemeBytes = []byte("data:")
|
||||
jsSchemeBytes = []byte("javascript:")
|
||||
httpBytes = []byte("http")
|
||||
inlineParams = map[string]string{"inline": "1"}
|
||||
)
|
||||
|
||||
////////////////////////////////////////////////////////////////
|
||||
|
||||
// DefaultMinifier is the default minifier.
|
||||
var DefaultMinifier = &Minifier{}
|
||||
|
||||
// Minifier is an HTML minifier.
|
||||
type Minifier struct {
|
||||
KeepConditionalComments bool
|
||||
KeepDefaultAttrVals bool
|
||||
KeepDocumentTags bool
|
||||
KeepEndTags bool
|
||||
KeepWhitespace bool
|
||||
}
|
||||
|
||||
// Minify minifies HTML data, it reads from r and writes to w.
|
||||
func Minify(m *minify.M, w io.Writer, r io.Reader, params map[string]string) error {
|
||||
return DefaultMinifier.Minify(m, w, r, params)
|
||||
}
|
||||
|
||||
// Minify minifies HTML data, it reads from r and writes to w.
|
||||
func (o *Minifier) Minify(m *minify.M, w io.Writer, r io.Reader, _ map[string]string) error {
|
||||
var rawTagHash html.Hash
|
||||
var rawTagMediatype []byte
|
||||
|
||||
omitSpace := true // if true the next leading space is omitted
|
||||
inPre := false
|
||||
|
||||
attrMinifyBuffer := buffer.NewWriter(make([]byte, 0, 64))
|
||||
attrByteBuffer := make([]byte, 0, 64)
|
||||
|
||||
l := html.NewLexer(r)
|
||||
defer l.Restore()
|
||||
|
||||
tb := NewTokenBuffer(l)
|
||||
for {
|
||||
t := *tb.Shift()
|
||||
SWITCH:
|
||||
switch t.TokenType {
|
||||
case html.ErrorToken:
|
||||
if l.Err() == io.EOF {
|
||||
return nil
|
||||
}
|
||||
return l.Err()
|
||||
case html.DoctypeToken:
|
||||
if _, err := w.Write(doctypeBytes); err != nil {
|
||||
return err
|
||||
}
|
||||
case html.CommentToken:
|
||||
if o.KeepConditionalComments && len(t.Text) > 6 && (bytes.HasPrefix(t.Text, []byte("[if ")) || bytes.HasSuffix(t.Text, []byte("[endif]")) || bytes.HasSuffix(t.Text, []byte("[endif]--"))) {
|
||||
// [if ...] is always 7 or more characters, [endif] is only encountered for downlevel-revealed
|
||||
// see https://msdn.microsoft.com/en-us/library/ms537512(v=vs.85).aspx#syntax
|
||||
if bytes.HasPrefix(t.Data, []byte("<!--[if ")) && bytes.HasSuffix(t.Data, []byte("<![endif]-->")) { // downlevel-hidden
|
||||
begin := bytes.IndexByte(t.Data, '>') + 1
|
||||
end := len(t.Data) - len("<![endif]-->")
|
||||
if _, err := w.Write(t.Data[:begin]); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := o.Minify(m, w, buffer.NewReader(t.Data[begin:end]), nil); err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err := w.Write(t.Data[end:]); err != nil {
|
||||
return err
|
||||
}
|
||||
} else if _, err := w.Write(t.Data); err != nil { // downlevel-revealed or short downlevel-hidden
|
||||
return err
|
||||
}
|
||||
}
|
||||
case html.SvgToken:
|
||||
if err := m.MinifyMimetype(svgMimeBytes, w, buffer.NewReader(t.Data), nil); err != nil {
|
||||
if err != minify.ErrNotExist {
|
||||
return err
|
||||
} else if _, err := w.Write(t.Data); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
case html.MathToken:
|
||||
if err := m.MinifyMimetype(mathMimeBytes, w, buffer.NewReader(t.Data), nil); err != nil {
|
||||
if err != minify.ErrNotExist {
|
||||
return err
|
||||
} else if _, err := w.Write(t.Data); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
case html.TextToken:
|
||||
// CSS and JS minifiers for inline code
|
||||
if rawTagHash != 0 {
|
||||
if rawTagHash == html.Style || rawTagHash == html.Script || rawTagHash == html.Iframe {
|
||||
var mimetype []byte
|
||||
var params map[string]string
|
||||
if rawTagHash == html.Iframe {
|
||||
mimetype = htmlMimeBytes
|
||||
} else if len(rawTagMediatype) > 0 {
|
||||
mimetype, params = parse.Mediatype(rawTagMediatype)
|
||||
} else if rawTagHash == html.Script {
|
||||
mimetype = jsMimeBytes
|
||||
} else if rawTagHash == html.Style {
|
||||
mimetype = cssMimeBytes
|
||||
}
|
||||
if err := m.MinifyMimetype(mimetype, w, buffer.NewReader(t.Data), params); err != nil {
|
||||
if err != minify.ErrNotExist {
|
||||
return err
|
||||
} else if _, err := w.Write(t.Data); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
} else if _, err := w.Write(t.Data); err != nil {
|
||||
return err
|
||||
}
|
||||
} else if inPre {
|
||||
if _, err := w.Write(t.Data); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
t.Data = parse.ReplaceMultipleWhitespace(t.Data)
|
||||
|
||||
// whitespace removal; trim left
|
||||
if omitSpace && (t.Data[0] == ' ' || t.Data[0] == '\n') {
|
||||
t.Data = t.Data[1:]
|
||||
}
|
||||
|
||||
// whitespace removal; trim right
|
||||
omitSpace = false
|
||||
if len(t.Data) == 0 {
|
||||
omitSpace = true
|
||||
} else if t.Data[len(t.Data)-1] == ' ' || t.Data[len(t.Data)-1] == '\n' {
|
||||
omitSpace = true
|
||||
i := 0
|
||||
for {
|
||||
next := tb.Peek(i)
|
||||
// trim if EOF, text token with leading whitespace or block token
|
||||
if next.TokenType == html.ErrorToken {
|
||||
t.Data = t.Data[:len(t.Data)-1]
|
||||
omitSpace = false
|
||||
break
|
||||
} else if next.TokenType == html.TextToken {
|
||||
// this only happens when a comment, doctype or phrasing end tag (only for !o.KeepWhitespace) was in between
|
||||
// remove if the text token starts with a whitespace
|
||||
if len(next.Data) > 0 && parse.IsWhitespace(next.Data[0]) {
|
||||
t.Data = t.Data[:len(t.Data)-1]
|
||||
omitSpace = false
|
||||
}
|
||||
break
|
||||
} else if next.TokenType == html.StartTagToken || next.TokenType == html.EndTagToken {
|
||||
if o.KeepWhitespace {
|
||||
break
|
||||
}
|
||||
// remove when followed up by a block tag
|
||||
if next.Traits&nonPhrasingTag != 0 {
|
||||
t.Data = t.Data[:len(t.Data)-1]
|
||||
omitSpace = false
|
||||
break
|
||||
} else if next.TokenType == html.StartTagToken {
|
||||
break
|
||||
}
|
||||
}
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
if _, err := w.Write(t.Data); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
case html.StartTagToken, html.EndTagToken:
|
||||
rawTagHash = 0
|
||||
hasAttributes := false
|
||||
if t.TokenType == html.StartTagToken {
|
||||
if next := tb.Peek(0); next.TokenType == html.AttributeToken {
|
||||
hasAttributes = true
|
||||
}
|
||||
if t.Traits&rawTag != 0 {
|
||||
// ignore empty script and style tags
|
||||
if !hasAttributes && (t.Hash == html.Script || t.Hash == html.Style) {
|
||||
if next := tb.Peek(1); next.TokenType == html.EndTagToken {
|
||||
tb.Shift()
|
||||
tb.Shift()
|
||||
break
|
||||
}
|
||||
}
|
||||
rawTagHash = t.Hash
|
||||
rawTagMediatype = nil
|
||||
}
|
||||
} else if t.Hash == html.Template {
|
||||
omitSpace = true // EndTagToken
|
||||
}
|
||||
|
||||
if t.Hash == html.Pre {
|
||||
inPre = t.TokenType == html.StartTagToken
|
||||
}
|
||||
|
||||
// remove superfluous tags, except for html, head and body tags when KeepDocumentTags is set
|
||||
if !hasAttributes && (!o.KeepDocumentTags && (t.Hash == html.Html || t.Hash == html.Head || t.Hash == html.Body) || t.Hash == html.Colgroup) {
|
||||
break
|
||||
} else if t.TokenType == html.EndTagToken {
|
||||
if !o.KeepEndTags {
|
||||
if t.Hash == html.Thead || t.Hash == html.Tbody || t.Hash == html.Tfoot || t.Hash == html.Tr || t.Hash == html.Th || t.Hash == html.Td ||
|
||||
t.Hash == html.Optgroup || t.Hash == html.Option || t.Hash == html.Dd || t.Hash == html.Dt ||
|
||||
t.Hash == html.Li || t.Hash == html.Rb || t.Hash == html.Rt || t.Hash == html.Rtc || t.Hash == html.Rp {
|
||||
break
|
||||
} else if t.Hash == html.P {
|
||||
i := 0
|
||||
for {
|
||||
next := tb.Peek(i)
|
||||
i++
|
||||
// continue if text token is empty or whitespace
|
||||
if next.TokenType == html.TextToken && parse.IsAllWhitespace(next.Data) {
|
||||
continue
|
||||
}
|
||||
if next.TokenType == html.ErrorToken || next.TokenType == html.EndTagToken && next.Traits&keepPTag == 0 || next.TokenType == html.StartTagToken && next.Traits&omitPTag != 0 {
|
||||
break SWITCH // omit p end tag
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if o.KeepWhitespace || t.Traits&objectTag != 0 {
|
||||
omitSpace = false
|
||||
} else if t.Traits&nonPhrasingTag != 0 {
|
||||
omitSpace = true // omit spaces after block elements
|
||||
}
|
||||
|
||||
if len(t.Data) > 3+len(t.Text) {
|
||||
t.Data[2+len(t.Text)] = '>'
|
||||
t.Data = t.Data[:3+len(t.Text)]
|
||||
}
|
||||
if _, err := w.Write(t.Data); err != nil {
|
||||
return err
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
if o.KeepWhitespace || t.Traits&objectTag != 0 {
|
||||
omitSpace = false
|
||||
} else if t.Traits&nonPhrasingTag != 0 {
|
||||
omitSpace = true // omit spaces after block elements
|
||||
}
|
||||
|
||||
if _, err := w.Write(t.Data); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if hasAttributes {
|
||||
if t.Hash == html.Meta {
|
||||
attrs := tb.Attributes(html.Content, html.Http_Equiv, html.Charset, html.Name)
|
||||
if content := attrs[0]; content != nil {
|
||||
if httpEquiv := attrs[1]; httpEquiv != nil {
|
||||
if charset := attrs[2]; charset == nil && parse.EqualFold(httpEquiv.AttrVal, []byte("content-type")) {
|
||||
content.AttrVal = minify.Mediatype(content.AttrVal)
|
||||
if bytes.Equal(content.AttrVal, []byte("text/html;charset=utf-8")) {
|
||||
httpEquiv.Text = nil
|
||||
content.Text = []byte("charset")
|
||||
content.Hash = html.Charset
|
||||
content.AttrVal = []byte("utf-8")
|
||||
}
|
||||
}
|
||||
}
|
||||
if name := attrs[3]; name != nil {
|
||||
if parse.EqualFold(name.AttrVal, []byte("keywords")) {
|
||||
content.AttrVal = bytes.Replace(content.AttrVal, []byte(", "), []byte(","), -1)
|
||||
} else if parse.EqualFold(name.AttrVal, []byte("viewport")) {
|
||||
content.AttrVal = bytes.Replace(content.AttrVal, []byte(" "), []byte(""), -1)
|
||||
for i := 0; i < len(content.AttrVal); i++ {
|
||||
if content.AttrVal[i] == '=' && i+2 < len(content.AttrVal) {
|
||||
i++
|
||||
if n := parse.Number(content.AttrVal[i:]); n > 0 {
|
||||
minNum := minify.Number(content.AttrVal[i:i+n], -1)
|
||||
if len(minNum) < n {
|
||||
copy(content.AttrVal[i:i+len(minNum)], minNum)
|
||||
copy(content.AttrVal[i+len(minNum):], content.AttrVal[i+n:])
|
||||
content.AttrVal = content.AttrVal[:len(content.AttrVal)+len(minNum)-n]
|
||||
}
|
||||
i += len(minNum)
|
||||
}
|
||||
i-- // mitigate for-loop increase
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if t.Hash == html.Script {
|
||||
attrs := tb.Attributes(html.Src, html.Charset)
|
||||
if attrs[0] != nil && attrs[1] != nil {
|
||||
attrs[1].Text = nil
|
||||
}
|
||||
} else if t.Hash == html.Input {
|
||||
attrs := tb.Attributes(html.Type, html.Value)
|
||||
if t, value := attrs[0], attrs[1]; t != nil && value != nil {
|
||||
isRadio := parse.EqualFold(t.AttrVal, []byte("radio"))
|
||||
if !isRadio && len(value.AttrVal) == 0 {
|
||||
value.Text = nil
|
||||
} else if isRadio && parse.EqualFold(value.AttrVal, []byte("on")) {
|
||||
value.Text = nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// write attributes
|
||||
htmlEqualIdName := false
|
||||
for {
|
||||
attr := *tb.Shift()
|
||||
if attr.TokenType != html.AttributeToken {
|
||||
break
|
||||
} else if attr.Text == nil {
|
||||
continue // removed attribute
|
||||
}
|
||||
|
||||
if t.Hash == html.A && (attr.Hash == html.Id || attr.Hash == html.Name) {
|
||||
if attr.Hash == html.Id {
|
||||
if name := tb.Attributes(html.Name)[0]; name != nil && bytes.Equal(attr.AttrVal, name.AttrVal) {
|
||||
htmlEqualIdName = true
|
||||
}
|
||||
} else if htmlEqualIdName {
|
||||
continue
|
||||
} else if id := tb.Attributes(html.Id)[0]; id != nil && bytes.Equal(id.AttrVal, attr.AttrVal) {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
val := attr.AttrVal
|
||||
if len(val) == 0 && (attr.Hash == html.Class ||
|
||||
attr.Hash == html.Dir ||
|
||||
attr.Hash == html.Id ||
|
||||
attr.Hash == html.Lang ||
|
||||
attr.Hash == html.Name ||
|
||||
attr.Hash == html.Title ||
|
||||
attr.Hash == html.Action && t.Hash == html.Form) {
|
||||
continue // omit empty attribute values
|
||||
}
|
||||
if attr.Traits&caselessAttr != 0 {
|
||||
val = parse.ToLower(val)
|
||||
if attr.Hash == html.Enctype || attr.Hash == html.Codetype || attr.Hash == html.Accept || attr.Hash == html.Type && (t.Hash == html.A || t.Hash == html.Link || t.Hash == html.Object || t.Hash == html.Param || t.Hash == html.Script || t.Hash == html.Style || t.Hash == html.Source) {
|
||||
val = minify.Mediatype(val)
|
||||
}
|
||||
}
|
||||
if rawTagHash != 0 && attr.Hash == html.Type {
|
||||
rawTagMediatype = parse.Copy(val)
|
||||
}
|
||||
|
||||
// default attribute values can be omitted
|
||||
if !o.KeepDefaultAttrVals && (attr.Hash == html.Type && (t.Hash == html.Script && jsMimetypes[string(val)] ||
|
||||
t.Hash == html.Style && bytes.Equal(val, []byte("text/css")) ||
|
||||
t.Hash == html.Link && bytes.Equal(val, []byte("text/css")) ||
|
||||
t.Hash == html.Input && bytes.Equal(val, []byte("text")) ||
|
||||
t.Hash == html.Button && bytes.Equal(val, []byte("submit"))) ||
|
||||
attr.Hash == html.Language && t.Hash == html.Script ||
|
||||
attr.Hash == html.Method && bytes.Equal(val, []byte("get")) ||
|
||||
attr.Hash == html.Enctype && bytes.Equal(val, []byte("application/x-www-form-urlencoded")) ||
|
||||
attr.Hash == html.Colspan && bytes.Equal(val, []byte("1")) ||
|
||||
attr.Hash == html.Rowspan && bytes.Equal(val, []byte("1")) ||
|
||||
attr.Hash == html.Shape && bytes.Equal(val, []byte("rect")) ||
|
||||
attr.Hash == html.Span && bytes.Equal(val, []byte("1")) ||
|
||||
attr.Hash == html.Clear && bytes.Equal(val, []byte("none")) ||
|
||||
attr.Hash == html.Frameborder && bytes.Equal(val, []byte("1")) ||
|
||||
attr.Hash == html.Scrolling && bytes.Equal(val, []byte("auto")) ||
|
||||
attr.Hash == html.Valuetype && bytes.Equal(val, []byte("data")) ||
|
||||
attr.Hash == html.Media && t.Hash == html.Style && bytes.Equal(val, []byte("all"))) {
|
||||
continue
|
||||
}
|
||||
|
||||
// CSS and JS minifiers for attribute inline code
|
||||
if attr.Hash == html.Style {
|
||||
attrMinifyBuffer.Reset()
|
||||
if err := m.MinifyMimetype(cssMimeBytes, attrMinifyBuffer, buffer.NewReader(val), inlineParams); err == nil {
|
||||
val = attrMinifyBuffer.Bytes()
|
||||
} else if err != minify.ErrNotExist {
|
||||
return err
|
||||
}
|
||||
if len(val) == 0 {
|
||||
continue
|
||||
}
|
||||
} else if len(attr.Text) > 2 && attr.Text[0] == 'o' && attr.Text[1] == 'n' {
|
||||
if len(val) >= 11 && parse.EqualFold(val[:11], jsSchemeBytes) {
|
||||
val = val[11:]
|
||||
}
|
||||
attrMinifyBuffer.Reset()
|
||||
if err := m.MinifyMimetype(jsMimeBytes, attrMinifyBuffer, buffer.NewReader(val), nil); err == nil {
|
||||
val = attrMinifyBuffer.Bytes()
|
||||
} else if err != minify.ErrNotExist {
|
||||
return err
|
||||
}
|
||||
if len(val) == 0 {
|
||||
continue
|
||||
}
|
||||
} else if len(val) > 5 && attr.Traits&urlAttr != 0 { // anchors are already handled
|
||||
if parse.EqualFold(val[:4], httpBytes) {
|
||||
if val[4] == ':' {
|
||||
if m.URL != nil && m.URL.Scheme == "http" {
|
||||
val = val[5:]
|
||||
} else {
|
||||
parse.ToLower(val[:4])
|
||||
}
|
||||
} else if (val[4] == 's' || val[4] == 'S') && val[5] == ':' {
|
||||
if m.URL != nil && m.URL.Scheme == "https" {
|
||||
val = val[6:]
|
||||
} else {
|
||||
parse.ToLower(val[:5])
|
||||
}
|
||||
}
|
||||
} else if parse.EqualFold(val[:5], dataSchemeBytes) {
|
||||
val = minify.DataURI(m, val)
|
||||
}
|
||||
}
|
||||
|
||||
if _, err := w.Write(spaceBytes); err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err := w.Write(attr.Text); err != nil {
|
||||
return err
|
||||
}
|
||||
if len(val) > 0 && attr.Traits&booleanAttr == 0 {
|
||||
if _, err := w.Write(isBytes); err != nil {
|
||||
return err
|
||||
}
|
||||
// no quotes if possible, else prefer single or double depending on which occurs more often in value
|
||||
val = html.EscapeAttrVal(&attrByteBuffer, attr.AttrVal, val)
|
||||
if _, err := w.Write(val); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if _, err := w.Write(gtBytes); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
191
vendor/github.com/tdewolff/minify/v2/html/table.go
generated
vendored
Normal file
191
vendor/github.com/tdewolff/minify/v2/html/table.go
generated
vendored
Normal file
|
@ -0,0 +1,191 @@
|
|||
package html // import "github.com/tdewolff/minify/html"
|
||||
|
||||
import "github.com/tdewolff/parse/v2/html"
|
||||
|
||||
type traits uint8
|
||||
|
||||
const (
|
||||
rawTag traits = 1 << iota
|
||||
nonPhrasingTag
|
||||
objectTag
|
||||
booleanAttr
|
||||
caselessAttr
|
||||
urlAttr
|
||||
omitPTag // omit p end tag if it is followed by this start tag
|
||||
keepPTag // keep p end tag if it is followed by this end tag
|
||||
)
|
||||
|
||||
var tagMap = map[html.Hash]traits{
|
||||
html.A: keepPTag,
|
||||
html.Address: nonPhrasingTag | omitPTag,
|
||||
html.Article: nonPhrasingTag | omitPTag,
|
||||
html.Aside: nonPhrasingTag | omitPTag,
|
||||
html.Audio: objectTag | keepPTag,
|
||||
html.Blockquote: nonPhrasingTag | omitPTag,
|
||||
html.Body: nonPhrasingTag,
|
||||
html.Br: nonPhrasingTag,
|
||||
html.Button: objectTag,
|
||||
html.Canvas: objectTag,
|
||||
html.Caption: nonPhrasingTag,
|
||||
html.Col: nonPhrasingTag,
|
||||
html.Colgroup: nonPhrasingTag,
|
||||
html.Dd: nonPhrasingTag,
|
||||
html.Del: keepPTag,
|
||||
html.Details: omitPTag,
|
||||
html.Div: nonPhrasingTag | omitPTag,
|
||||
html.Dl: nonPhrasingTag | omitPTag,
|
||||
html.Dt: nonPhrasingTag,
|
||||
html.Embed: nonPhrasingTag,
|
||||
html.Fieldset: nonPhrasingTag | omitPTag,
|
||||
html.Figcaption: nonPhrasingTag | omitPTag,
|
||||
html.Figure: nonPhrasingTag | omitPTag,
|
||||
html.Footer: nonPhrasingTag | omitPTag,
|
||||
html.Form: nonPhrasingTag | omitPTag,
|
||||
html.H1: nonPhrasingTag | omitPTag,
|
||||
html.H2: nonPhrasingTag | omitPTag,
|
||||
html.H3: nonPhrasingTag | omitPTag,
|
||||
html.H4: nonPhrasingTag | omitPTag,
|
||||
html.H5: nonPhrasingTag | omitPTag,
|
||||
html.H6: nonPhrasingTag | omitPTag,
|
||||
html.Head: nonPhrasingTag,
|
||||
html.Header: nonPhrasingTag | omitPTag,
|
||||
html.Hgroup: nonPhrasingTag,
|
||||
html.Hr: nonPhrasingTag | omitPTag,
|
||||
html.Html: nonPhrasingTag,
|
||||
html.Iframe: rawTag | objectTag,
|
||||
html.Img: objectTag,
|
||||
html.Input: objectTag,
|
||||
html.Ins: keepPTag,
|
||||
html.Keygen: objectTag,
|
||||
html.Li: nonPhrasingTag,
|
||||
html.Main: nonPhrasingTag | omitPTag,
|
||||
html.Map: keepPTag,
|
||||
html.Math: rawTag,
|
||||
html.Menu: omitPTag,
|
||||
html.Meta: nonPhrasingTag,
|
||||
html.Meter: objectTag,
|
||||
html.Nav: nonPhrasingTag | omitPTag,
|
||||
html.Noscript: nonPhrasingTag | keepPTag,
|
||||
html.Object: objectTag,
|
||||
html.Ol: nonPhrasingTag | omitPTag,
|
||||
html.Output: nonPhrasingTag,
|
||||
html.P: nonPhrasingTag | omitPTag,
|
||||
html.Picture: objectTag,
|
||||
html.Pre: nonPhrasingTag | omitPTag,
|
||||
html.Progress: objectTag,
|
||||
html.Q: objectTag,
|
||||
html.Script: rawTag,
|
||||
html.Section: nonPhrasingTag | omitPTag,
|
||||
html.Select: objectTag,
|
||||
html.Style: rawTag | nonPhrasingTag,
|
||||
html.Svg: rawTag | objectTag,
|
||||
html.Table: nonPhrasingTag | omitPTag,
|
||||
html.Tbody: nonPhrasingTag,
|
||||
html.Td: nonPhrasingTag,
|
||||
html.Textarea: rawTag | objectTag,
|
||||
html.Tfoot: nonPhrasingTag,
|
||||
html.Th: nonPhrasingTag,
|
||||
html.Thead: nonPhrasingTag,
|
||||
html.Title: nonPhrasingTag,
|
||||
html.Tr: nonPhrasingTag,
|
||||
html.Ul: nonPhrasingTag | omitPTag,
|
||||
html.Video: objectTag | keepPTag,
|
||||
}
|
||||
|
||||
var attrMap = map[html.Hash]traits{
|
||||
html.Accept: caselessAttr,
|
||||
html.Accept_Charset: caselessAttr,
|
||||
html.Action: urlAttr,
|
||||
html.Align: caselessAttr,
|
||||
html.Alink: caselessAttr,
|
||||
html.Allowfullscreen: booleanAttr,
|
||||
html.Async: booleanAttr,
|
||||
html.Autofocus: booleanAttr,
|
||||
html.Autoplay: booleanAttr,
|
||||
html.Axis: caselessAttr,
|
||||
html.Background: urlAttr,
|
||||
html.Bgcolor: caselessAttr,
|
||||
html.Charset: caselessAttr,
|
||||
html.Checked: booleanAttr,
|
||||
html.Cite: urlAttr,
|
||||
html.Classid: urlAttr,
|
||||
html.Clear: caselessAttr,
|
||||
html.Codebase: urlAttr,
|
||||
html.Codetype: caselessAttr,
|
||||
html.Color: caselessAttr,
|
||||
html.Compact: booleanAttr,
|
||||
html.Controls: booleanAttr,
|
||||
html.Data: urlAttr,
|
||||
html.Declare: booleanAttr,
|
||||
html.Default: booleanAttr,
|
||||
html.DefaultChecked: booleanAttr,
|
||||
html.DefaultMuted: booleanAttr,
|
||||
html.DefaultSelected: booleanAttr,
|
||||
html.Defer: booleanAttr,
|
||||
html.Dir: caselessAttr,
|
||||
html.Disabled: booleanAttr,
|
||||
html.Enabled: booleanAttr,
|
||||
html.Enctype: caselessAttr,
|
||||
html.Face: caselessAttr,
|
||||
html.Formaction: urlAttr,
|
||||
html.Formnovalidate: booleanAttr,
|
||||
html.Frame: caselessAttr,
|
||||
html.Hidden: booleanAttr,
|
||||
html.Href: urlAttr,
|
||||
html.Hreflang: caselessAttr,
|
||||
html.Http_Equiv: caselessAttr,
|
||||
html.Icon: urlAttr,
|
||||
html.Inert: booleanAttr,
|
||||
html.Ismap: booleanAttr,
|
||||
html.Itemscope: booleanAttr,
|
||||
html.Lang: caselessAttr,
|
||||
html.Language: caselessAttr,
|
||||
html.Link: caselessAttr,
|
||||
html.Longdesc: urlAttr,
|
||||
html.Manifest: urlAttr,
|
||||
html.Media: caselessAttr,
|
||||
html.Method: caselessAttr,
|
||||
html.Multiple: booleanAttr,
|
||||
html.Muted: booleanAttr,
|
||||
html.Nohref: booleanAttr,
|
||||
html.Noresize: booleanAttr,
|
||||
html.Noshade: booleanAttr,
|
||||
html.Novalidate: booleanAttr,
|
||||
html.Nowrap: booleanAttr,
|
||||
html.Open: booleanAttr,
|
||||
html.Pauseonexit: booleanAttr,
|
||||
html.Poster: urlAttr,
|
||||
html.Profile: urlAttr,
|
||||
html.Readonly: booleanAttr,
|
||||
html.Rel: caselessAttr,
|
||||
html.Required: booleanAttr,
|
||||
html.Rev: caselessAttr,
|
||||
html.Reversed: booleanAttr,
|
||||
html.Rules: caselessAttr,
|
||||
html.Scope: caselessAttr,
|
||||
html.Scoped: booleanAttr,
|
||||
html.Scrolling: caselessAttr,
|
||||
html.Seamless: booleanAttr,
|
||||
html.Selected: booleanAttr,
|
||||
html.Shape: caselessAttr,
|
||||
html.Sortable: booleanAttr,
|
||||
html.Src: urlAttr,
|
||||
html.Target: caselessAttr,
|
||||
html.Text: caselessAttr,
|
||||
html.Translate: booleanAttr,
|
||||
html.Truespeed: booleanAttr,
|
||||
html.Type: caselessAttr,
|
||||
html.Typemustmatch: booleanAttr,
|
||||
html.Undeterminate: booleanAttr,
|
||||
html.Usemap: urlAttr,
|
||||
html.Valign: caselessAttr,
|
||||
html.Valuetype: caselessAttr,
|
||||
html.Vlink: caselessAttr,
|
||||
html.Visible: booleanAttr,
|
||||
html.Xmlns: urlAttr,
|
||||
}
|
||||
|
||||
var jsMimetypes = map[string]bool{
|
||||
"text/javascript": true,
|
||||
"application/javascript": true,
|
||||
}
|
279
vendor/github.com/tdewolff/minify/v2/minify.go
generated
vendored
Normal file
279
vendor/github.com/tdewolff/minify/v2/minify.go
generated
vendored
Normal file
|
@ -0,0 +1,279 @@
|
|||
// Package minify relates MIME type to minifiers. Several minifiers are provided in the subpackages.
|
||||
package minify // import "github.com/tdewolff/minify"
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"io"
|
||||
"mime"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os/exec"
|
||||
"path"
|
||||
"regexp"
|
||||
"sync"
|
||||
|
||||
"github.com/tdewolff/parse/v2"
|
||||
"github.com/tdewolff/parse/v2/buffer"
|
||||
)
|
||||
|
||||
// ErrNotExist is returned when no minifier exists for a given mimetype.
|
||||
var ErrNotExist = errors.New("minifier does not exist for mimetype")
|
||||
|
||||
////////////////////////////////////////////////////////////////
|
||||
|
||||
// MinifierFunc is a function that implements Minifer.
|
||||
type MinifierFunc func(*M, io.Writer, io.Reader, map[string]string) error
|
||||
|
||||
// Minify calls f(m, w, r, params)
|
||||
func (f MinifierFunc) Minify(m *M, w io.Writer, r io.Reader, params map[string]string) error {
|
||||
return f(m, w, r, params)
|
||||
}
|
||||
|
||||
// Minifier is the interface for minifiers.
|
||||
// The *M parameter is used for minifying embedded resources, such as JS within HTML.
|
||||
type Minifier interface {
|
||||
Minify(*M, io.Writer, io.Reader, map[string]string) error
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////
|
||||
|
||||
type patternMinifier struct {
|
||||
pattern *regexp.Regexp
|
||||
Minifier
|
||||
}
|
||||
|
||||
type cmdMinifier struct {
|
||||
cmd *exec.Cmd
|
||||
}
|
||||
|
||||
func (c *cmdMinifier) Minify(_ *M, w io.Writer, r io.Reader, _ map[string]string) error {
|
||||
cmd := &exec.Cmd{}
|
||||
*cmd = *c.cmd // concurrency safety
|
||||
cmd.Stdout = w
|
||||
cmd.Stdin = r
|
||||
return cmd.Run()
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////
|
||||
|
||||
// M holds a map of mimetype => function to allow recursive minifier calls of the minifier functions.
|
||||
type M struct {
|
||||
literal map[string]Minifier
|
||||
pattern []patternMinifier
|
||||
|
||||
URL *url.URL
|
||||
}
|
||||
|
||||
// New returns a new M.
|
||||
func New() *M {
|
||||
return &M{
|
||||
map[string]Minifier{},
|
||||
[]patternMinifier{},
|
||||
nil,
|
||||
}
|
||||
}
|
||||
|
||||
// Add adds a minifier to the mimetype => function map (unsafe for concurrent use).
|
||||
func (m *M) Add(mimetype string, minifier Minifier) {
|
||||
m.literal[mimetype] = minifier
|
||||
}
|
||||
|
||||
// AddFunc adds a minify function to the mimetype => function map (unsafe for concurrent use).
|
||||
func (m *M) AddFunc(mimetype string, minifier MinifierFunc) {
|
||||
m.literal[mimetype] = minifier
|
||||
}
|
||||
|
||||
// AddRegexp adds a minifier to the mimetype => function map (unsafe for concurrent use).
|
||||
func (m *M) AddRegexp(pattern *regexp.Regexp, minifier Minifier) {
|
||||
m.pattern = append(m.pattern, patternMinifier{pattern, minifier})
|
||||
}
|
||||
|
||||
// AddFuncRegexp adds a minify function to the mimetype => function map (unsafe for concurrent use).
|
||||
func (m *M) AddFuncRegexp(pattern *regexp.Regexp, minifier MinifierFunc) {
|
||||
m.pattern = append(m.pattern, patternMinifier{pattern, minifier})
|
||||
}
|
||||
|
||||
// AddCmd adds a minify function to the mimetype => function map (unsafe for concurrent use) that executes a command to process the minification.
|
||||
// It allows the use of external tools like ClosureCompiler, UglifyCSS, etc. for a specific mimetype.
|
||||
func (m *M) AddCmd(mimetype string, cmd *exec.Cmd) {
|
||||
m.literal[mimetype] = &cmdMinifier{cmd}
|
||||
}
|
||||
|
||||
// AddCmdRegexp adds a minify function to the mimetype => function map (unsafe for concurrent use) that executes a command to process the minification.
|
||||
// It allows the use of external tools like ClosureCompiler, UglifyCSS, etc. for a specific mimetype regular expression.
|
||||
func (m *M) AddCmdRegexp(pattern *regexp.Regexp, cmd *exec.Cmd) {
|
||||
m.pattern = append(m.pattern, patternMinifier{pattern, &cmdMinifier{cmd}})
|
||||
}
|
||||
|
||||
// Match returns the pattern and minifier that gets matched with the mediatype.
|
||||
// It returns nil when no matching minifier exists.
|
||||
// It has the same matching algorithm as Minify.
|
||||
func (m *M) Match(mediatype string) (string, map[string]string, MinifierFunc) {
|
||||
mimetype, params := parse.Mediatype([]byte(mediatype))
|
||||
if minifier, ok := m.literal[string(mimetype)]; ok { // string conversion is optimized away
|
||||
return string(mimetype), params, minifier.Minify
|
||||
}
|
||||
|
||||
for _, minifier := range m.pattern {
|
||||
if minifier.pattern.Match(mimetype) {
|
||||
return minifier.pattern.String(), params, minifier.Minify
|
||||
}
|
||||
}
|
||||
return string(mimetype), params, nil
|
||||
}
|
||||
|
||||
// Minify minifies the content of a Reader and writes it to a Writer (safe for concurrent use).
|
||||
// An error is returned when no such mimetype exists (ErrNotExist) or when an error occurred in the minifier function.
|
||||
// Mediatype may take the form of 'text/plain', 'text/*', '*/*' or 'text/plain; charset=UTF-8; version=2.0'.
|
||||
func (m *M) Minify(mediatype string, w io.Writer, r io.Reader) error {
|
||||
mimetype, params := parse.Mediatype([]byte(mediatype))
|
||||
return m.MinifyMimetype(mimetype, w, r, params)
|
||||
}
|
||||
|
||||
// MinifyMimetype minifies the content of a Reader and writes it to a Writer (safe for concurrent use).
|
||||
// It is a lower level version of Minify and requires the mediatype to be split up into mimetype and parameters.
|
||||
// It is mostly used internally by minifiers because it is faster (no need to convert a byte-slice to string and vice versa).
|
||||
func (m *M) MinifyMimetype(mimetype []byte, w io.Writer, r io.Reader, params map[string]string) error {
|
||||
err := ErrNotExist
|
||||
if minifier, ok := m.literal[string(mimetype)]; ok { // string conversion is optimized away
|
||||
err = minifier.Minify(m, w, r, params)
|
||||
} else {
|
||||
for _, minifier := range m.pattern {
|
||||
if minifier.pattern.Match(mimetype) {
|
||||
err = minifier.Minify(m, w, r, params)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// Bytes minifies an array of bytes (safe for concurrent use). When an error occurs it return the original array and the error.
|
||||
// It returns an error when no such mimetype exists (ErrNotExist) or any error occurred in the minifier function.
|
||||
func (m *M) Bytes(mediatype string, v []byte) ([]byte, error) {
|
||||
out := buffer.NewWriter(make([]byte, 0, len(v)))
|
||||
if err := m.Minify(mediatype, out, buffer.NewReader(v)); err != nil {
|
||||
return v, err
|
||||
}
|
||||
return out.Bytes(), nil
|
||||
}
|
||||
|
||||
// String minifies a string (safe for concurrent use). When an error occurs it return the original string and the error.
|
||||
// It returns an error when no such mimetype exists (ErrNotExist) or any error occurred in the minifier function.
|
||||
func (m *M) String(mediatype string, v string) (string, error) {
|
||||
out := buffer.NewWriter(make([]byte, 0, len(v)))
|
||||
if err := m.Minify(mediatype, out, buffer.NewReader([]byte(v))); err != nil {
|
||||
return v, err
|
||||
}
|
||||
return string(out.Bytes()), nil
|
||||
}
|
||||
|
||||
// Reader wraps a Reader interface and minifies the stream.
|
||||
// Errors from the minifier are returned by the reader.
|
||||
func (m *M) Reader(mediatype string, r io.Reader) io.Reader {
|
||||
pr, pw := io.Pipe()
|
||||
go func() {
|
||||
if err := m.Minify(mediatype, pw, r); err != nil {
|
||||
pw.CloseWithError(err)
|
||||
} else {
|
||||
pw.Close()
|
||||
}
|
||||
}()
|
||||
return pr
|
||||
}
|
||||
|
||||
// minifyWriter makes sure that errors from the minifier are passed down through Close (can be blocking).
|
||||
type minifyWriter struct {
|
||||
pw *io.PipeWriter
|
||||
wg sync.WaitGroup
|
||||
err error
|
||||
}
|
||||
|
||||
// Write intercepts any writes to the writer.
|
||||
func (w *minifyWriter) Write(b []byte) (int, error) {
|
||||
return w.pw.Write(b)
|
||||
}
|
||||
|
||||
// Close must be called when writing has finished. It returns the error from the minifier.
|
||||
func (w *minifyWriter) Close() error {
|
||||
w.pw.Close()
|
||||
w.wg.Wait()
|
||||
return w.err
|
||||
}
|
||||
|
||||
// Writer wraps a Writer interface and minifies the stream.
|
||||
// Errors from the minifier are returned by Close on the writer.
|
||||
// The writer must be closed explicitly.
|
||||
func (m *M) Writer(mediatype string, w io.Writer) *minifyWriter {
|
||||
pr, pw := io.Pipe()
|
||||
mw := &minifyWriter{pw, sync.WaitGroup{}, nil}
|
||||
mw.wg.Add(1)
|
||||
go func() {
|
||||
defer mw.wg.Done()
|
||||
|
||||
if err := m.Minify(mediatype, w, pr); err != nil {
|
||||
io.Copy(w, pr)
|
||||
mw.err = err
|
||||
}
|
||||
pr.Close()
|
||||
}()
|
||||
return mw
|
||||
}
|
||||
|
||||
// minifyResponseWriter wraps an http.ResponseWriter and makes sure that errors from the minifier are passed down through Close (can be blocking).
|
||||
// All writes to the response writer are intercepted and minified on the fly.
|
||||
// http.ResponseWriter loses all functionality such as Pusher, Hijacker, Flusher, ...
|
||||
type minifyResponseWriter struct {
|
||||
http.ResponseWriter
|
||||
|
||||
writer *minifyWriter
|
||||
m *M
|
||||
mediatype string
|
||||
}
|
||||
|
||||
// WriteHeader intercepts any header writes and removes the Content-Length header.
|
||||
func (w *minifyResponseWriter) WriteHeader(status int) {
|
||||
w.ResponseWriter.Header().Del("Content-Length")
|
||||
w.ResponseWriter.WriteHeader(status)
|
||||
}
|
||||
|
||||
// Write intercepts any writes to the response writer.
|
||||
// The first write will extract the Content-Type as the mediatype. Otherwise it falls back to the RequestURI extension.
|
||||
func (w *minifyResponseWriter) Write(b []byte) (int, error) {
|
||||
if w.writer == nil {
|
||||
// first write
|
||||
if mediatype := w.ResponseWriter.Header().Get("Content-Type"); mediatype != "" {
|
||||
w.mediatype = mediatype
|
||||
}
|
||||
w.writer = w.m.Writer(w.mediatype, w.ResponseWriter)
|
||||
}
|
||||
return w.writer.Write(b)
|
||||
}
|
||||
|
||||
// Close must be called when writing has finished. It returns the error from the minifier.
|
||||
func (w *minifyResponseWriter) Close() error {
|
||||
if w.writer != nil {
|
||||
return w.writer.Close()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ResponseWriter minifies any writes to the http.ResponseWriter.
|
||||
// http.ResponseWriter loses all functionality such as Pusher, Hijacker, Flusher, ...
|
||||
// Minification might be slower than just sending the original file! Caching is advised.
|
||||
func (m *M) ResponseWriter(w http.ResponseWriter, r *http.Request) *minifyResponseWriter {
|
||||
mediatype := mime.TypeByExtension(path.Ext(r.RequestURI))
|
||||
return &minifyResponseWriter{w, nil, m, mediatype}
|
||||
}
|
||||
|
||||
// Middleware provides a middleware function that minifies content on the fly by intercepting writes to http.ResponseWriter.
|
||||
// http.ResponseWriter loses all functionality such as Pusher, Hijacker, Flusher, ...
|
||||
// Minification might be slower than just sending the original file! Caching is advised.
|
||||
func (m *M) Middleware(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
mw := m.ResponseWriter(w, r)
|
||||
defer mw.Close()
|
||||
|
||||
next.ServeHTTP(mw, r)
|
||||
})
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue