init go mod with ldap dependency
commit 40431e0823 (parent d33482fbd2)
1246 changed files with 389323 additions and 0 deletions
260  vendor/github.com/caddyserver/caddy/caddyfile/dispenser.go  (generated, vendored, new file)
@@ -0,0 +1,260 @@
// Copyright 2015 Light Code Labs, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package caddyfile

import (
	"errors"
	"fmt"
	"io"
	"strings"
)

// Dispenser is a type that dispenses tokens, similarly to a lexer,
// except that it can do so with some notion of structure and has
// some really convenient methods.
type Dispenser struct {
	filename string
	tokens   []Token
	cursor   int
	nesting  int
}

// NewDispenser returns a Dispenser, ready to use for parsing the given input.
func NewDispenser(filename string, input io.Reader) Dispenser {
	tokens, _ := allTokens(input) // ignoring error because nothing to do with it
	return Dispenser{
		filename: filename,
		tokens:   tokens,
		cursor:   -1,
	}
}

// NewDispenserTokens returns a Dispenser filled with the given tokens.
func NewDispenserTokens(filename string, tokens []Token) Dispenser {
	return Dispenser{
		filename: filename,
		tokens:   tokens,
		cursor:   -1,
	}
}

// Next loads the next token. Returns true if a token
// was loaded; false otherwise. If false, all tokens
// have been consumed.
func (d *Dispenser) Next() bool {
	if d.cursor < len(d.tokens)-1 {
		d.cursor++
		return true
	}
	return false
}

// NextArg loads the next token if it is on the same
// line. Returns true if a token was loaded; false
// otherwise. If false, all tokens on the line have
// been consumed. It handles imported tokens correctly.
func (d *Dispenser) NextArg() bool {
	if d.cursor < 0 {
		d.cursor++
		return true
	}
	if d.cursor >= len(d.tokens) {
		return false
	}
	if d.cursor < len(d.tokens)-1 &&
		d.tokens[d.cursor].File == d.tokens[d.cursor+1].File &&
		d.tokens[d.cursor].Line+d.numLineBreaks(d.cursor) == d.tokens[d.cursor+1].Line {
		d.cursor++
		return true
	}
	return false
}

// NextLine loads the next token only if it is not on the same
// line as the current token, and returns true if a token was
// loaded; false otherwise. If false, there is not another token
// or it is on the same line. It handles imported tokens correctly.
func (d *Dispenser) NextLine() bool {
	if d.cursor < 0 {
		d.cursor++
		return true
	}
	if d.cursor >= len(d.tokens) {
		return false
	}
	if d.cursor < len(d.tokens)-1 &&
		(d.tokens[d.cursor].File != d.tokens[d.cursor+1].File ||
			d.tokens[d.cursor].Line+d.numLineBreaks(d.cursor) < d.tokens[d.cursor+1].Line) {
		d.cursor++
		return true
	}
	return false
}

// NextBlock can be used as the condition of a for loop
// to load the next token as long as it opens a block or
// is already in a block. It returns true if a token was
// loaded, or false when the block's closing curly brace
// was loaded and thus the block ended. Nested blocks are
// not supported.
func (d *Dispenser) NextBlock() bool {
	if d.nesting > 0 {
		d.Next()
		if d.Val() == "}" {
			d.nesting--
			return false
		}
		return true
	}
	if !d.NextArg() { // block must open on same line
		return false
	}
	if d.Val() != "{" {
		d.cursor-- // roll back if not opening brace
		return false
	}
	d.Next()
	if d.Val() == "}" {
		// Open and then closed right away
		return false
	}
	d.nesting++
	return true
}
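
// Editor's sketch (not part of the upstream file): NextBlock is meant to
// drive a loop. Assuming the dispenser sits on a directive line such as
// `header / { X-Custom "value" }`, the iteration idiom looks like:
//
//	for d.NextBlock() {
//		// each iteration loads one token inside the braces
//		subdirective := d.Val()
//		_ = subdirective
//	}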

// Val gets the text of the current token. If there is no token
// loaded, it returns empty string.
func (d *Dispenser) Val() string {
	if d.cursor < 0 || d.cursor >= len(d.tokens) {
		return ""
	}
	return d.tokens[d.cursor].Text
}

// Line gets the line number of the current token. If there is no token
// loaded, it returns 0.
func (d *Dispenser) Line() int {
	if d.cursor < 0 || d.cursor >= len(d.tokens) {
		return 0
	}
	return d.tokens[d.cursor].Line
}

// File gets the filename of the current token. If there is no token loaded,
// it returns the filename originally given when parsing started.
func (d *Dispenser) File() string {
	if d.cursor < 0 || d.cursor >= len(d.tokens) {
		return d.filename
	}
	if tokenFilename := d.tokens[d.cursor].File; tokenFilename != "" {
		return tokenFilename
	}
	return d.filename
}

// Args is a convenience function that loads the next arguments
// (tokens on the same line) into an arbitrary number of strings
// pointed to in targets. If there are fewer tokens available
// than string pointers, the remaining strings will not be changed
// and false will be returned. If there were enough tokens available
// to fill the arguments, then true will be returned.
func (d *Dispenser) Args(targets ...*string) bool {
	enough := true
	for i := 0; i < len(targets); i++ {
		if !d.NextArg() {
			enough = false
			break
		}
		*targets[i] = d.Val()
	}
	return enough
}
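
// Editor's sketch (not part of the upstream file): filling a fixed number
// of arguments with Args. Assuming a directive line like `tls cert.pem key.pem`:
//
//	var certFile, keyFile string
//	if !d.Args(&certFile, &keyFile) {
//		return d.ArgErr() // fewer tokens on this line than pointers given
//	}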

// RemainingArgs loads any more arguments (tokens on the same line)
// into a slice and returns them. Open curly brace tokens also indicate
// the end of arguments, and the curly brace is not included in
// the return value nor is it loaded.
func (d *Dispenser) RemainingArgs() []string {
	var args []string

	for d.NextArg() {
		if d.Val() == "{" {
			d.cursor--
			break
		}
		args = append(args, d.Val())
	}

	return args
}

// ArgErr returns an argument error, meaning that another
// argument was expected but not found. In other words,
// a line break or open curly brace was encountered instead of
// an argument.
func (d *Dispenser) ArgErr() error {
	if d.Val() == "{" {
		return d.Err("Unexpected token '{', expecting argument")
	}
	return d.Errf("Wrong argument count or unexpected line ending after '%s'", d.Val())
}

// SyntaxErr creates a generic syntax error which explains what was
// found and what was expected.
func (d *Dispenser) SyntaxErr(expected string) error {
	msg := fmt.Sprintf("%s:%d - Syntax error: Unexpected token '%s', expecting '%s'", d.File(), d.Line(), d.Val(), expected)
	return errors.New(msg)
}

// EOFErr returns an error indicating that the dispenser reached
// the end of the input when searching for the next token.
func (d *Dispenser) EOFErr() error {
	return d.Errf("Unexpected EOF")
}

// Err generates a custom parse-time error with a message of msg.
func (d *Dispenser) Err(msg string) error {
	msg = fmt.Sprintf("%s:%d - Error during parsing: %s", d.File(), d.Line(), msg)
	return errors.New(msg)
}

// Errf is like Err, but for formatted error messages
func (d *Dispenser) Errf(format string, args ...interface{}) error {
	return d.Err(fmt.Sprintf(format, args...))
}

// numLineBreaks counts how many line breaks are in the token
// value given by the token index tknIdx. It returns 0 if the
// token does not exist or there are no line breaks.
func (d *Dispenser) numLineBreaks(tknIdx int) int {
	if tknIdx < 0 || tknIdx >= len(d.tokens) {
		return 0
	}
	return strings.Count(d.tokens[tknIdx].Text, "\n")
}

// isNewLine determines whether the current token is on a different
// line (higher line number) than the previous token. It handles imported
// tokens correctly. If there isn't a previous token, it returns true.
func (d *Dispenser) isNewLine() bool {
	if d.cursor < 1 {
		return true
	}
	if d.cursor > len(d.tokens)-1 {
		return false
	}
	return d.tokens[d.cursor-1].File != d.tokens[d.cursor].File ||
		d.tokens[d.cursor-1].Line+d.numLineBreaks(d.cursor-1) < d.tokens[d.cursor].Line
}
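
Editor's sketch of external usage (the token values are assumed examples, not part of this commit): building a Dispenser directly from tokens and walking them with Next and Val.

	package main

	import (
		"fmt"

		"github.com/caddyserver/caddy/caddyfile"
	)

	func main() {
		// NewDispenserTokens skips lexing and wraps pre-built tokens.
		tokens := []caddyfile.Token{
			{File: "Caddyfile", Line: 1, Text: "root"},
			{File: "Caddyfile", Line: 1, Text: "/var/www"},
		}
		d := caddyfile.NewDispenserTokens("Caddyfile", tokens)

		for d.Next() { // advance through every token
			fmt.Printf("%s:%d %s\n", d.File(), d.Line(), d.Val())
		}
	}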
198  vendor/github.com/caddyserver/caddy/caddyfile/json.go  (generated, vendored, new file)
@@ -0,0 +1,198 @@
// Copyright 2015 Light Code Labs, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package caddyfile

import (
	"bytes"
	"encoding/json"
	"fmt"
	"sort"
	"strconv"
	"strings"
)

const filename = "Caddyfile"

// ToJSON converts caddyfile to its JSON representation.
func ToJSON(caddyfile []byte) ([]byte, error) {
	var j EncodedCaddyfile

	serverBlocks, err := Parse(filename, bytes.NewReader(caddyfile), nil)
	if err != nil {
		return nil, err
	}

	for _, sb := range serverBlocks {
		block := EncodedServerBlock{
			Keys: sb.Keys,
			Body: [][]interface{}{},
		}

		// Extract directives deterministically by sorting them.
		// Allocate capacity only: entries are appended below (a length
		// allocation would prepend empty strings before the sort).
		directives := make([]string, 0, len(sb.Tokens))
		for dir := range sb.Tokens {
			directives = append(directives, dir)
		}
		sort.Strings(directives)

		// Convert each directive's tokens into our JSON structure
		for _, dir := range directives {
			disp := NewDispenserTokens(filename, sb.Tokens[dir])
			for disp.Next() {
				block.Body = append(block.Body, constructLine(&disp))
			}
		}

		// tack this block onto the end of the list
		j = append(j, block)
	}

	result, err := json.Marshal(j)
	if err != nil {
		return nil, err
	}

	return result, nil
}
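
// Editor's sketch (not part of the upstream file): given the
// EncodedServerBlock shape defined at the bottom of this file, a
// one-block Caddyfile serializes roughly as:
//
//	out, _ := ToJSON([]byte("example.com {\n\troot /www\n}"))
//	// out is roughly [{"keys":["example.com"],"body":[["root","/www"]]}]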

// constructLine transforms tokens into a JSON-encodable structure;
// but only one line at a time, to be used at the top-level of
// a server block only (where the first token on each line is a
// directive) - not to be used at any other nesting level.
func constructLine(d *Dispenser) []interface{} {
	var args []interface{}

	args = append(args, d.Val())

	for d.NextArg() {
		if d.Val() == "{" {
			args = append(args, constructBlock(d))
			continue
		}
		args = append(args, d.Val())
	}

	return args
}

// constructBlock recursively processes tokens into a
// JSON-encodable structure. To be used in a directive's
// block. Goes to end of block.
func constructBlock(d *Dispenser) [][]interface{} {
	block := [][]interface{}{}

	for d.Next() {
		if d.Val() == "}" {
			break
		}
		block = append(block, constructLine(d))
	}

	return block
}

// FromJSON converts JSON-encoded jsonBytes to Caddyfile text
func FromJSON(jsonBytes []byte) ([]byte, error) {
	var j EncodedCaddyfile
	var result string

	err := json.Unmarshal(jsonBytes, &j)
	if err != nil {
		return nil, err
	}

	for sbPos, sb := range j {
		if sbPos > 0 {
			result += "\n\n"
		}
		for i, key := range sb.Keys {
			if i > 0 {
				result += ", "
			}
			//result += standardizeScheme(key)
			result += key
		}
		result += jsonToText(sb.Body, 1)
	}

	return []byte(result), nil
}

// jsonToText recursively transforms a scope of JSON into plain
// Caddyfile text.
func jsonToText(scope interface{}, depth int) string {
	var result string

	switch val := scope.(type) {
	case string:
		if strings.ContainsAny(val, "\" \n\t\r") {
			result += `"` + strings.Replace(val, "\"", "\\\"", -1) + `"`
		} else {
			result += val
		}
	case int:
		result += strconv.Itoa(val)
	case float64:
		result += fmt.Sprintf("%v", val)
	case bool:
		result += fmt.Sprintf("%t", val)
	case [][]interface{}:
		result += " {\n"
		for _, arg := range val {
			result += strings.Repeat("\t", depth) + jsonToText(arg, depth+1) + "\n"
		}
		result += strings.Repeat("\t", depth-1) + "}"
	case []interface{}:
		for i, v := range val {
			if block, ok := v.([]interface{}); ok {
				result += "{\n"
				for _, arg := range block {
					result += strings.Repeat("\t", depth) + jsonToText(arg, depth+1) + "\n"
				}
				result += strings.Repeat("\t", depth-1) + "}"
				continue
			}
			result += jsonToText(v, depth)
			if i < len(val)-1 {
				result += " "
			}
		}
	}

	return result
}

// TODO: Will this function come in handy somewhere else?
/*
// standardizeScheme turns an address like host:https into https://host,
// or "host:" into "host".
func standardizeScheme(addr string) string {
	if hostname, port, err := net.SplitHostPort(addr); err == nil {
		if port == "http" || port == "https" {
			addr = port + "://" + hostname
		}
	}
	return strings.TrimSuffix(addr, ":")
}
*/

// EncodedCaddyfile encapsulates a slice of EncodedServerBlocks.
type EncodedCaddyfile []EncodedServerBlock

// EncodedServerBlock represents a server block ripe for encoding.
type EncodedServerBlock struct {
	Keys []string        `json:"keys"`
	Body [][]interface{} `json:"body"`
}
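
A hedged round-trip sketch (editor's note; the input content is an assumed example): ToJSON then FromJSON should reproduce an equivalent Caddyfile.

	package main

	import (
		"fmt"

		"github.com/caddyserver/caddy/caddyfile"
	)

	func main() {
		input := []byte("example.com {\n\tgzip\n}")

		j, err := caddyfile.ToJSON(input) // Caddyfile -> JSON
		if err != nil {
			panic(err)
		}
		fmt.Println(string(j)) // roughly [{"keys":["example.com"],"body":[["gzip"]]}]

		text, err := caddyfile.FromJSON(j) // JSON -> Caddyfile text
		if err != nil {
			panic(err)
		}
		fmt.Println(string(text))
	}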
150  vendor/github.com/caddyserver/caddy/caddyfile/lexer.go  (generated, vendored, new file)
@@ -0,0 +1,150 @@
// Copyright 2015 Light Code Labs, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package caddyfile

import (
	"bufio"
	"io"
	"unicode"
)

type (
	// lexer is a utility which can get values, token by
	// token, from a Reader. A token is a word, and tokens
	// are separated by whitespace. A word can be enclosed
	// in quotes if it contains whitespace.
	lexer struct {
		reader *bufio.Reader
		token  Token
		line   int
	}

	// Token represents a single parsable unit.
	Token struct {
		File string
		Line int
		Text string
	}
)

// load prepares the lexer to scan an input for tokens.
// It discards any leading byte order mark.
func (l *lexer) load(input io.Reader) error {
	l.reader = bufio.NewReader(input)
	l.line = 1

	// discard byte order mark, if present
	firstCh, _, err := l.reader.ReadRune()
	if err != nil {
		return err
	}
	if firstCh != 0xFEFF {
		err := l.reader.UnreadRune()
		if err != nil {
			return err
		}
	}

	return nil
}

// next loads the next token into the lexer.
// A token is delimited by whitespace, unless
// the token starts with a quotes character (")
// in which case the token goes until the closing
// quotes (the enclosing quotes are not included).
// Inside quoted strings, quotes may be escaped
// with a preceding \ character. No other chars
// may be escaped. The rest of the line is skipped
// if a "#" character is read in. Returns true if
// a token was loaded; false otherwise.
func (l *lexer) next() bool {
	var val []rune
	var comment, quoted, escaped bool

	makeToken := func() bool {
		l.token.Text = string(val)
		return true
	}

	for {
		ch, _, err := l.reader.ReadRune()
		if err != nil {
			if len(val) > 0 {
				return makeToken()
			}
			if err == io.EOF {
				return false
			}
			panic(err)
		}

		if quoted {
			if !escaped {
				if ch == '\\' {
					escaped = true
					continue
				} else if ch == '"' {
					quoted = false
					return makeToken()
				}
			}
			if ch == '\n' {
				l.line++
			}
			if escaped {
				// only escape quotes
				if ch != '"' {
					val = append(val, '\\')
				}
			}
			val = append(val, ch)
			escaped = false
			continue
		}

		if unicode.IsSpace(ch) {
			if ch == '\r' {
				continue
			}
			if ch == '\n' {
				l.line++
				comment = false
			}
			if len(val) > 0 {
				return makeToken()
			}
			continue
		}

		if ch == '#' {
			comment = true
		}

		if comment {
			continue
		}

		if len(val) == 0 {
			l.token = Token{Line: l.line}
			if ch == '"' {
				quoted = true
				continue
			}
		}

		val = append(val, ch)
	}
}
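
Since lexer and next are unexported, their quoting, escaping, and comment rules are easiest to exercise from inside the package. A hypothetical in-package test sketch (editor's note, not part of the vendor tree):

	package caddyfile

	import (
		"reflect"
		"strings"
		"testing"
	)

	func TestLexerRules(t *testing.T) {
		// quotes group words, \" escapes a quote, # skips to end of line
		input := "host \"say \\\"hi\\\"\" # trailing comment\nnext_line"
		l := new(lexer)
		if err := l.load(strings.NewReader(input)); err != nil {
			t.Fatal(err)
		}
		var got []string
		for l.next() {
			got = append(got, l.token.Text)
		}
		want := []string{"host", `say "hi"`, "next_line"}
		if !reflect.DeepEqual(got, want) {
			t.Fatalf("got %q, want %q", got, want)
		}
	}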
490  vendor/github.com/caddyserver/caddy/caddyfile/parse.go  (generated, vendored, new file)
@@ -0,0 +1,490 @@
// Copyright 2015 Light Code Labs, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package caddyfile

import (
	"io"
	"log"
	"os"
	"path/filepath"
	"strings"
)

// Parse parses the input just enough to group tokens, in
// order, by server block. No further parsing is performed.
// Server blocks are returned in the order in which they appear.
// Directives that do not appear in validDirectives will cause
// an error. If you do not want to check for valid directives,
// pass in nil instead.
func Parse(filename string, input io.Reader, validDirectives []string) ([]ServerBlock, error) {
	p := parser{Dispenser: NewDispenser(filename, input), validDirectives: validDirectives}
	return p.parseAll()
}
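
// Editor's sketch (not part of the upstream file): typical usage from a
// caller; passing nil skips directive validation:
//
//	blocks, err := caddyfile.Parse("Caddyfile",
//		strings.NewReader("example.com {\n\tgzip\n}"), nil)
//	// blocks[0].Keys is ["example.com"], and blocks[0].Tokens["gzip"]
//	// holds the tokens collected for that directive.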

// allTokens lexes the entire input, but does not parse it.
// It returns all the tokens from the input, unstructured
// and in order.
func allTokens(input io.Reader) ([]Token, error) {
	l := new(lexer)
	err := l.load(input)
	if err != nil {
		return nil, err
	}
	var tokens []Token
	for l.next() {
		tokens = append(tokens, l.token)
	}
	return tokens, nil
}

type parser struct {
	Dispenser
	block           ServerBlock // current server block being parsed
	validDirectives []string    // a directive must be valid or it's an error
	eof             bool        // if we encounter a valid EOF in a hard place
	definedSnippets map[string][]Token
}

func (p *parser) parseAll() ([]ServerBlock, error) {
	var blocks []ServerBlock

	for p.Next() {
		err := p.parseOne()
		if err != nil {
			return blocks, err
		}
		if len(p.block.Keys) > 0 {
			blocks = append(blocks, p.block)
		}
	}

	return blocks, nil
}

func (p *parser) parseOne() error {
	p.block = ServerBlock{Tokens: make(map[string][]Token)}

	return p.begin()
}

func (p *parser) begin() error {
	if len(p.tokens) == 0 {
		return nil
	}

	err := p.addresses()

	if err != nil {
		return err
	}

	if p.eof {
		// this happens if the Caddyfile consists of only
		// a line of addresses and nothing else
		return nil
	}

	if ok, name := p.isSnippet(); ok {
		if p.definedSnippets == nil {
			p.definedSnippets = map[string][]Token{}
		}
		if _, found := p.definedSnippets[name]; found {
			return p.Errf("redeclaration of previously declared snippet %s", name)
		}
		// consume all tokens til matched close brace
		tokens, err := p.snippetTokens()
		if err != nil {
			return err
		}
		p.definedSnippets[name] = tokens
		// empty block keys so we don't save this block as a real server.
		p.block.Keys = nil
		return nil
	}

	return p.blockContents()
}

func (p *parser) addresses() error {
	var expectingAnother bool

	for {
		tkn := replaceEnvVars(p.Val())

		// special case: import directive replaces tokens during parse-time
		if tkn == "import" && p.isNewLine() {
			err := p.doImport()
			if err != nil {
				return err
			}
			continue
		}

		// Open brace definitely indicates end of addresses
		if tkn == "{" {
			if expectingAnother {
				return p.Errf("Expected another address but had '%s' - check for extra comma", tkn)
			}
			break
		}

		if tkn != "" { // empty token possible if user typed ""
			// Trailing comma indicates another address will follow, which
			// may possibly be on the next line
			if tkn[len(tkn)-1] == ',' {
				tkn = tkn[:len(tkn)-1]
				expectingAnother = true
			} else {
				expectingAnother = false // but we may still see another one on this line
			}

			p.block.Keys = append(p.block.Keys, tkn)
		}

		// Advance token and possibly break out of loop or return error
		hasNext := p.Next()
		if expectingAnother && !hasNext {
			return p.EOFErr()
		}
		if !hasNext {
			p.eof = true
			break // EOF
		}
		if !expectingAnother && p.isNewLine() {
			break
		}
	}

	return nil
}

func (p *parser) blockContents() error {
	errOpenCurlyBrace := p.openCurlyBrace()
	if errOpenCurlyBrace != nil {
		// single-server configs don't need curly braces
		p.cursor--
	}

	err := p.directives()
	if err != nil {
		return err
	}

	// Only look for close curly brace if there was an opening
	if errOpenCurlyBrace == nil {
		err = p.closeCurlyBrace()
		if err != nil {
			return err
		}
	}

	return nil
}

// directives parses through all the lines for directives
// and it expects the next token to be the first
// directive. It goes until EOF or closing curly brace
// which ends the server block.
func (p *parser) directives() error {
	for p.Next() {
		// end of server block
		if p.Val() == "}" {
			break
		}

		// special case: import directive replaces tokens during parse-time
		if p.Val() == "import" {
			err := p.doImport()
			if err != nil {
				return err
			}
			p.cursor-- // cursor is advanced when we continue, so roll back one more
			continue
		}

		// normal case: parse a directive on this line
		if err := p.directive(); err != nil {
			return err
		}
	}
	return nil
}

// doImport swaps out the import directive and its argument
// (a total of 2 tokens) with the tokens in the specified file
// or globbing pattern. When the function returns, the cursor
// is on the token before where the import directive was. In
// other words, call Next() to access the first token that was
// imported.
func (p *parser) doImport() error {
	// syntax checks
	if !p.NextArg() {
		return p.ArgErr()
	}
	importPattern := replaceEnvVars(p.Val())
	if importPattern == "" {
		return p.Err("Import requires a non-empty filepath")
	}
	if p.NextArg() {
		return p.Err("Import takes only one argument (glob pattern or file)")
	}
	// splice out the import directive and its argument (2 tokens total)
	tokensBefore := p.tokens[:p.cursor-1]
	tokensAfter := p.tokens[p.cursor+1:]
	var importedTokens []Token

	// first check snippets. That is a simple, non-recursive replacement
	if p.definedSnippets != nil && p.definedSnippets[importPattern] != nil {
		importedTokens = p.definedSnippets[importPattern]
	} else {
		// make path relative to the file of the _token_ being processed rather
		// than current working directory (issue #867) and then use glob to get
		// list of matching filenames
		absFile, err := filepath.Abs(p.Dispenser.File())
		if err != nil {
			return p.Errf("Failed to get absolute path of file: %s: %v", p.Dispenser.filename, err)
		}

		var matches []string
		var globPattern string
		if !filepath.IsAbs(importPattern) {
			globPattern = filepath.Join(filepath.Dir(absFile), importPattern)
		} else {
			globPattern = importPattern
		}
		if strings.Count(globPattern, "*") > 1 || strings.Count(globPattern, "?") > 1 ||
			(strings.Contains(globPattern, "[") && strings.Contains(globPattern, "]")) {
			// See issue #2096 - a pattern with many glob expansions can hang for too long
			return p.Errf("Glob pattern may only contain one wildcard (*), but has others: %s", globPattern)
		}
		matches, err = filepath.Glob(globPattern)

		if err != nil {
			return p.Errf("Failed to use import pattern %s: %v", importPattern, err)
		}
		if len(matches) == 0 {
			if strings.ContainsAny(globPattern, "*?[]") {
				log.Printf("[WARNING] No files matching import glob pattern: %s", importPattern)
			} else {
				return p.Errf("File to import not found: %s", importPattern)
			}
		}

		// collect all the imported tokens
		for _, importFile := range matches {
			newTokens, err := p.doSingleImport(importFile)
			if err != nil {
				return err
			}
			importedTokens = append(importedTokens, newTokens...)
		}
	}

	// splice the imported tokens in the place of the import statement
	// and rewind cursor so Next() will land on first imported token
	p.tokens = append(tokensBefore, append(importedTokens, tokensAfter...)...)
	p.cursor--

	return nil
}
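
// Editor's note (sketch, not part of the upstream file): given a line
// such as
//
//	import conf.d/*.conf
//
// the two import tokens are spliced out and replaced with every token
// lexed from the matching files; relative patterns resolve against the
// file containing the import (issue #867), and the cursor is rewound so
// the caller's next Next() lands on the first imported token.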

// doSingleImport lexes the individual file at importFile and returns
// its tokens or an error, if any.
func (p *parser) doSingleImport(importFile string) ([]Token, error) {
	file, err := os.Open(importFile)
	if err != nil {
		return nil, p.Errf("Could not import %s: %v", importFile, err)
	}
	defer file.Close()

	if info, err := file.Stat(); err != nil {
		return nil, p.Errf("Could not import %s: %v", importFile, err)
	} else if info.IsDir() {
		return nil, p.Errf("Could not import %s: is a directory", importFile)
	}

	importedTokens, err := allTokens(file)
	if err != nil {
		return nil, p.Errf("Could not read tokens while importing %s: %v", importFile, err)
	}

	// Tack the file path onto these tokens so errors show the imported file's name
	// (we use full, absolute path to avoid bugs: issue #1892)
	filename, err := filepath.Abs(importFile)
	if err != nil {
		return nil, p.Errf("Failed to get absolute path of file: %s: %v", p.Dispenser.filename, err)
	}
	for i := 0; i < len(importedTokens); i++ {
		importedTokens[i].File = filename
	}

	return importedTokens, nil
}

// directive collects tokens until the directive's scope
// closes (either end of line or end of curly brace block).
// It expects the currently-loaded token to be a directive
// (or } that ends a server block). The collected tokens
// are loaded into the current server block for later use
// by directive setup functions.
func (p *parser) directive() error {
	dir := replaceEnvVars(p.Val())
	nesting := 0

	// TODO: More helpful error message ("did you mean..." or "maybe you need to install its server type")
	if !p.validDirective(dir) {
		return p.Errf("Unknown directive '%s'", dir)
	}

	// The directive itself is appended as a relevant token
	p.block.Tokens[dir] = append(p.block.Tokens[dir], p.tokens[p.cursor])

	for p.Next() {
		if p.Val() == "{" {
			nesting++
		} else if p.isNewLine() && nesting == 0 {
			p.cursor-- // read too far
			break
		} else if p.Val() == "}" && nesting > 0 {
			nesting--
		} else if p.Val() == "}" && nesting == 0 {
			return p.Err("Unexpected '}' because no matching opening brace")
		} else if p.Val() == "import" && p.isNewLine() {
			if err := p.doImport(); err != nil {
				return err
			}
			p.cursor-- // cursor is advanced when we continue, so roll back one more
			continue
		}
		p.tokens[p.cursor].Text = replaceEnvVars(p.tokens[p.cursor].Text)
		p.block.Tokens[dir] = append(p.block.Tokens[dir], p.tokens[p.cursor])
	}

	if nesting > 0 {
		return p.EOFErr()
	}
	return nil
}

// openCurlyBrace expects the current token to be an
// opening curly brace. This acts like an assertion
// because it returns an error if the token is not
// an opening curly brace. It does NOT advance the token.
func (p *parser) openCurlyBrace() error {
	if p.Val() != "{" {
		return p.SyntaxErr("{")
	}
	return nil
}

// closeCurlyBrace expects the current token to be
// a closing curly brace. This acts like an assertion
// because it returns an error if the token is not
// a closing curly brace. It does NOT advance the token.
func (p *parser) closeCurlyBrace() error {
	if p.Val() != "}" {
		return p.SyntaxErr("}")
	}
	return nil
}

// validDirective returns true if dir is in p.validDirectives.
func (p *parser) validDirective(dir string) bool {
	if p.validDirectives == nil {
		return true
	}
	for _, d := range p.validDirectives {
		if d == dir {
			return true
		}
	}
	return false
}

// replaceEnvVars replaces environment variables that appear in the token
// and understands both the $UNIX and %WINDOWS% syntaxes.
func replaceEnvVars(s string) string {
	s = replaceEnvReferences(s, "{%", "%}")
	s = replaceEnvReferences(s, "{$", "}")
	return s
}

// replaceEnvReferences performs the actual replacement of env variables
// in s, given the placeholder start and placeholder end strings.
func replaceEnvReferences(s, refStart, refEnd string) string {
	index := strings.Index(s, refStart)
	for index != -1 {
		endIndex := strings.Index(s[index:], refEnd)
		if endIndex == -1 {
			break
		}

		endIndex += index
		if endIndex > index+len(refStart) {
			ref := s[index : endIndex+len(refEnd)]
			s = strings.Replace(s, ref, os.Getenv(ref[len(refStart):len(ref)-len(refEnd)]), -1)
		} else {
			return s
		}
		index = strings.Index(s, refStart)
	}
	return s
}
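
// Editor's sketch (not part of the upstream file): both syntaxes read
// from the process environment. Assuming SITE_ROOT=/var/www:
//
//	replaceEnvVars("root {$SITE_ROOT}")  // "root /var/www" ($UNIX style)
//	replaceEnvVars("root {%SITE_ROOT%}") // "root /var/www" (%WINDOWS% style)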

// ServerBlock associates any number of keys (usually addresses
// of some sort) with tokens (grouped by directive name).
type ServerBlock struct {
	Keys   []string
	Tokens map[string][]Token
}

func (p *parser) isSnippet() (bool, string) {
	keys := p.block.Keys
	// A snippet block is a single key with parens. Nothing else qualifies.
	if len(keys) == 1 && strings.HasPrefix(keys[0], "(") && strings.HasSuffix(keys[0], ")") {
		return true, strings.TrimSuffix(keys[0][1:], ")")
	}
	return false, ""
}

// read and store everything in a block for later replay.
func (p *parser) snippetTokens() ([]Token, error) {
	// TODO: disallow imports in snippets for simplicity at import time
	// snippet must have curlies.
	err := p.openCurlyBrace()
	if err != nil {
		return nil, err
	}
	count := 1
	tokens := []Token{}
	for p.Next() {
		if p.Val() == "}" {
			count--
			if count == 0 {
				break
			}
		}
		if p.Val() == "{" {
			count++
		}
		tokens = append(tokens, p.tokens[p.cursor])
	}
	// make sure we're matched up
	if count != 0 {
		return nil, p.SyntaxErr("}")
	}
	return tokens, nil
}
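
Editor's sketch tying snippets and import together (the config is an assumed example, not part of this commit): a parenthesized key defines a snippet, and import replays its tokens inside a later block.

	package main

	import (
		"fmt"
		"strings"

		"github.com/caddyserver/caddy/caddyfile"
	)

	func main() {
		config := `(common) {
		gzip
	}

	example.com {
		import common
	}`

		// The (common) block is stored as a snippet and not returned as a
		// server block; example.com gets the snippet's tokens spliced in.
		blocks, err := caddyfile.Parse("Caddyfile", strings.NewReader(config), nil)
		if err != nil {
			panic(err)
		}
		for _, b := range blocks {
			fmt.Println(b.Keys, len(b.Tokens)) // [example.com] 1
		}
	}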