path: root/core/parse/lexer_test.go
author Miek Gieben <miek@miek.nl> 2016-08-19 17:14:17 -0700
committer GitHub <noreply@github.com> 2016-08-19 17:14:17 -0700
commit 9ac3cab1b7b1b1e78f86ce3c6a80fbee312162e6 (patch)
tree 437e9755927c33af16276ad2602a6da115f948cb /core/parse/lexer_test.go
parent a1989c35231b0e5ea271b2f68d82c1a63e697cd0 (diff)
download coredns-9ac3cab1b7b1b1e78f86ce3c6a80fbee312162e6.tar.gz
coredns-9ac3cab1b7b1b1e78f86ce3c6a80fbee312162e6.tar.zst
coredns-9ac3cab1b7b1b1e78f86ce3c6a80fbee312162e6.zip
Make CoreDNS a server type plugin for Caddy (#220)
* Make CoreDNS a server type plugin for Caddy. Remove code we don't need and port all middleware over. Fix all tests and rework the documentation. Also make `go generate` build a caddy binary, which we then copy into our directory; this means `go build` builds keep working as-is. Add new etcd instances in each etcd test for better isolation. Fix more tests and rework test.Server with the newer support Caddy offers. Fix the Makefile to support the new mode of operation.
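Under the new model, each piece of middleware registers itself as a Caddy plugin scoped to a "dns" server type that CoreDNS provides. The sketch below is only illustrative of that registration pattern, assuming Caddy 0.9's caddy.RegisterPlugin / caddy.Plugin / caddy.Controller API; the directive name "example" and the setupExample function are hypothetical and are not part of this commit.

package exampleplugin

import (
	"fmt"

	"github.com/mholt/caddy"
)

// init registers a hypothetical "example" directive for the "dns" server
// type using Caddy 0.9's plugin mechanism. Real CoreDNS middleware follows
// the same pattern in its own setup code.
func init() {
	caddy.RegisterPlugin("example", caddy.Plugin{
		ServerType: "dns",        // the server type CoreDNS registers with Caddy
		Action:     setupExample, // run when the directive appears in a Corefile
	})
}

// setupExample consumes the directive's tokens from the Corefile. A real
// middleware would also insert a handler into the server's middleware chain.
func setupExample(c *caddy.Controller) error {
	for c.Next() { // once per occurrence of the directive
		if c.NextArg() {
			fmt.Println("example argument:", c.Val())
		}
	}
	return nil
}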
Diffstat (limited to 'core/parse/lexer_test.go')
-rw-r--r-- core/parse/lexer_test.go 165
1 file changed, 0 insertions, 165 deletions
diff --git a/core/parse/lexer_test.go b/core/parse/lexer_test.go
deleted file mode 100644
index f12c7e7dc..000000000
--- a/core/parse/lexer_test.go
+++ /dev/null
@@ -1,165 +0,0 @@
-package parse
-
-import (
- "strings"
- "testing"
-)
-
-type lexerTestCase struct {
- input string
- expected []token
-}
-
-func TestLexer(t *testing.T) {
- testCases := []lexerTestCase{
- {
- input: `host:123`,
- expected: []token{
- {line: 1, text: "host:123"},
- },
- },
- {
- input: `host:123
-
- directive`,
- expected: []token{
- {line: 1, text: "host:123"},
- {line: 3, text: "directive"},
- },
- },
- {
- input: `host:123 {
- directive
- }`,
- expected: []token{
- {line: 1, text: "host:123"},
- {line: 1, text: "{"},
- {line: 2, text: "directive"},
- {line: 3, text: "}"},
- },
- },
- {
- input: `host:123 { directive }`,
- expected: []token{
- {line: 1, text: "host:123"},
- {line: 1, text: "{"},
- {line: 1, text: "directive"},
- {line: 1, text: "}"},
- },
- },
- {
- input: `host:123 {
- #comment
- directive
- # comment
- foobar # another comment
- }`,
- expected: []token{
- {line: 1, text: "host:123"},
- {line: 1, text: "{"},
- {line: 3, text: "directive"},
- {line: 5, text: "foobar"},
- {line: 6, text: "}"},
- },
- },
- {
- input: `a "quoted value" b
- foobar`,
- expected: []token{
- {line: 1, text: "a"},
- {line: 1, text: "quoted value"},
- {line: 1, text: "b"},
- {line: 2, text: "foobar"},
- },
- },
- {
- input: `A "quoted \"value\" inside" B`,
- expected: []token{
- {line: 1, text: "A"},
- {line: 1, text: `quoted "value" inside`},
- {line: 1, text: "B"},
- },
- },
- {
- input: `"don't\escape"`,
- expected: []token{
- {line: 1, text: `don't\escape`},
- },
- },
- {
- input: `"don't\\escape"`,
- expected: []token{
- {line: 1, text: `don't\\escape`},
- },
- },
- {
- input: `A "quoted value with line
- break inside" {
- foobar
- }`,
- expected: []token{
- {line: 1, text: "A"},
- {line: 1, text: "quoted value with line\n\t\t\t\t\tbreak inside"},
- {line: 2, text: "{"},
- {line: 3, text: "foobar"},
- {line: 4, text: "}"},
- },
- },
- {
- input: `"C:\php\php-cgi.exe"`,
- expected: []token{
- {line: 1, text: `C:\php\php-cgi.exe`},
- },
- },
- {
- input: `empty "" string`,
- expected: []token{
- {line: 1, text: `empty`},
- {line: 1, text: ``},
- {line: 1, text: `string`},
- },
- },
- {
- input: "skip those\r\nCR characters",
- expected: []token{
- {line: 1, text: "skip"},
- {line: 1, text: "those"},
- {line: 2, text: "CR"},
- {line: 2, text: "characters"},
- },
- },
- }
-
- for i, testCase := range testCases {
- actual := tokenize(testCase.input)
- lexerCompare(t, i, testCase.expected, actual)
- }
-}
-
-func tokenize(input string) (tokens []token) {
- l := lexer{}
- l.load(strings.NewReader(input))
- for l.next() {
- tokens = append(tokens, l.token)
- }
- return
-}
-
-func lexerCompare(t *testing.T, n int, expected, actual []token) {
- if len(expected) != len(actual) {
- t.Errorf("Test case %d: expected %d token(s) but got %d", n, len(expected), len(actual))
- }
-
- for i := 0; i < len(actual) && i < len(expected); i++ {
- if actual[i].line != expected[i].line {
- t.Errorf("Test case %d token %d ('%s'): expected line %d but was line %d",
- n, i, expected[i].text, expected[i].line, actual[i].line)
- break
- }
- if actual[i].text != expected[i].text {
- t.Errorf("Test case %d token %d: expected text '%s' but was '%s'",
- n, i, expected[i].text, actual[i].text)
- break
- }
- }
-}