| author | 2016-08-19 17:14:17 -0700 |
|---|---|
| committer | 2016-08-19 17:14:17 -0700 |
| commit | 9ac3cab1b7b1b1e78f86ce3c6a80fbee312162e6 (patch) |
| tree | 437e9755927c33af16276ad2602a6da115f948cb /core/parse/lexer.go |
| parent | a1989c35231b0e5ea271b2f68d82c1a63e697cd0 (diff) |
Make CoreDNS a server type plugin for Caddy (#220)
* Make CoreDNS a server type plugin for Caddy
Remove code we don't need and port all middleware over. Fix all tests
and rework the documentation.
Also make `go generate` build a caddy binary which we then copy into
our directory. This means plain `go build` builds keep working as-is.
Add new etcd instances in each etcd test for better isolation.
Fix more tests and rework test.Server with the newer support that Caddy offers.
Fix the Makefile to support the new mode of operation.
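
The middleware port described above follows Caddy's plugin model: each middleware registers itself against the new "dns" server type and supplies a setup function that parses its directive from the Corefile. The sketch below is illustrative only, assuming the Caddy 0.9-era plugin API (`caddy.RegisterPlugin`, `caddy.Plugin`, `caddy.Controller`); the directive name `example` and the empty setup body are hypothetical and not taken from this commit.

```go
// Illustrative sketch: rough shape of a middleware registering against the
// "dns" server type with the Caddy 0.9-era plugin API. The directive name
// "example" and the setup body are hypothetical.
package example

import "github.com/mholt/caddy"

func init() {
	caddy.RegisterPlugin("example", caddy.Plugin{
		ServerType: "dns", // attach to the CoreDNS server type, not HTTP
		Action:     setup,
	})
}

// setup is called while the Corefile is parsed; it consumes the directive's
// tokens and would normally wire the middleware into the server's config.
func setup(c *caddy.Controller) error {
	for c.Next() {
		for c.NextArg() {
			// read arguments and configure the middleware ...
		}
	}
	return nil
}
```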
Diffstat (limited to 'core/parse/lexer.go')
-rw-r--r-- | core/parse/lexer.go | 122 |
1 file changed, 0 insertions, 122 deletions
```diff
diff --git a/core/parse/lexer.go b/core/parse/lexer.go
deleted file mode 100644
index d2939eba2..000000000
--- a/core/parse/lexer.go
+++ /dev/null
@@ -1,122 +0,0 @@
-package parse
-
-import (
-	"bufio"
-	"io"
-	"unicode"
-)
-
-type (
-	// lexer is a utility which can get values, token by
-	// token, from a Reader. A token is a word, and tokens
-	// are separated by whitespace. A word can be enclosed
-	// in quotes if it contains whitespace.
-	lexer struct {
-		reader *bufio.Reader
-		token  token
-		line   int
-	}
-
-	// token represents a single parsable unit.
-	token struct {
-		file string
-		line int
-		text string
-	}
-)
-
-// load prepares the lexer to scan an input for tokens.
-func (l *lexer) load(input io.Reader) error {
-	l.reader = bufio.NewReader(input)
-	l.line = 1
-	return nil
-}
-
-// next loads the next token into the lexer.
-// A token is delimited by whitespace, unless
-// the token starts with a quotes character (")
-// in which case the token goes until the closing
-// quotes (the enclosing quotes are not included).
-// Inside quoted strings, quotes may be escaped
-// with a preceding \ character. No other chars
-// may be escaped. The rest of the line is skipped
-// if a "#" character is read in. Returns true if
-// a token was loaded; false otherwise.
-func (l *lexer) next() bool {
-	var val []rune
-	var comment, quoted, escaped bool
-
-	makeToken := func() bool {
-		l.token.text = string(val)
-		return true
-	}
-
-	for {
-		ch, _, err := l.reader.ReadRune()
-		if err != nil {
-			if len(val) > 0 {
-				return makeToken()
-			}
-			if err == io.EOF {
-				return false
-			}
-			panic(err)
-		}
-
-		if quoted {
-			if !escaped {
-				if ch == '\\' {
-					escaped = true
-					continue
-				} else if ch == '"' {
-					quoted = false
-					return makeToken()
-				}
-			}
-			if ch == '\n' {
-				l.line++
-			}
-			if escaped {
-				// only escape quotes
-				if ch != '"' {
-					val = append(val, '\\')
-				}
-			}
-			val = append(val, ch)
-			escaped = false
-			continue
-		}
-
-		if unicode.IsSpace(ch) {
-			if ch == '\r' {
-				continue
-			}
-			if ch == '\n' {
-				l.line++
-				comment = false
-			}
-			if len(val) > 0 {
-				return makeToken()
-			}
-			continue
-		}
-
-		if ch == '#' {
-			comment = true
-		}
-
-		if comment {
-			continue
-		}
-
-		if len(val) == 0 {
-			l.token = token{line: l.line}
-			if ch == '"' {
-				quoted = true
-				continue
-			}
-		}
-
-		val = append(val, ch)
-	}
-}
```
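
For context on what was removed: the doc comment on `next` above describes the whitespace, quote, and comment rules the tokenizer applied. The following is a hypothetical usage sketch; the function name `exampleTokens` and the sample input are not from the source, and the `lexer` type was unexported, so code like this could only live inside the old `core/parse` package.

```go
// Hypothetical sketch showing how the deleted lexer tokenized input; it only
// compiles inside the old core/parse package since lexer was unexported.
package parse

import (
	"fmt"
	"strings"
)

func exampleTokens() {
	input := `cache 300 {
    # comments are skipped to the end of the line
    path "/var/lib/coredns cache"
}`
	l := new(lexer)
	if err := l.load(strings.NewReader(input)); err != nil {
		panic(err)
	}
	for l.next() {
		// Expected tokens, in order: cache, 300, {, path,
		// /var/lib/coredns cache (quotes stripped), }
		fmt.Printf("line %d: %q\n", l.token.line, l.token.text)
	}
}
```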