Diffstat (limited to 'plugin/auto')
-rw-r--r--  plugin/auto/README.md        68
-rw-r--r--  plugin/auto/auto.go          96
-rw-r--r--  plugin/auto/regexp.go        20
-rw-r--r--  plugin/auto/regexp_test.go   20
-rw-r--r--  plugin/auto/setup.go        172
-rw-r--r--  plugin/auto/setup_test.go   125
-rw-r--r--  plugin/auto/walk.go         109
-rw-r--r--  plugin/auto/walk_test.go     94
-rw-r--r--  plugin/auto/watcher_test.go  58
-rw-r--r--  plugin/auto/zone.go          76
10 files changed, 838 insertions, 0 deletions
diff --git a/plugin/auto/README.md b/plugin/auto/README.md
new file mode 100644
index 000000000..7cbc4fced
--- /dev/null
+++ b/plugin/auto/README.md
@@ -0,0 +1,68 @@
+# auto
+
+*auto* enables serving zone data from an RFC 1035-style master file which is automatically picked
+up from disk.
+
+The *auto* plugin is used for an "old-style" DNS server. It serves from a preloaded file that exists
+on disk. If the zone file contains signatures (i.e. is signed with DNSSEC), correct DNSSEC answers
+are returned. Only NSEC is supported! If you use this setup *you* are responsible for re-signing the
+zone file. New or changed zones are automatically picked up from disk.
+
+## Syntax
+
+~~~
+auto [ZONES...] {
+    directory DIR [REGEXP ORIGIN_TEMPLATE [TIMEOUT]]
+    no_reload
+    upstream ADDRESS...
+}
+~~~
+
+**ZONES** are the zones it should be authoritative for. If empty, the zones from the configuration
+block are used.
+
+* `directory` loads zones from the specified **DIR**. If a file name matches **REGEXP** it will be
+  used to extract the origin. **ORIGIN_TEMPLATE** will be used as a template for the origin. Strings
+  like `{<number>}` are replaced with the respective matches in the file name, i.e. `{1}` is the
+  first match, `{2}` is the second, etc. The default is `db\.(.*) {1}`, e.g. from a file with the
+  name `db.example.com`, the extracted origin will be `example.com`. **TIMEOUT** specifies how often
+  CoreDNS should scan the directory; the default is every 60 seconds. This value is in seconds.
+  The minimum value is 1 second.
+* `no_reload` by default CoreDNS will reload a zone from disk whenever it detects a change to the
+  file. This option disables that behavior.
+* `upstream` defines upstream resolvers to be used when resolving external names found in the zone
+  data (think CNAMEs pointing to external names). **ADDRESS** can be an IP address, an IP:port, or
+  a path to a file structured like /etc/resolv.conf.
+
+All directives from the *file* plugin are supported. Note that *auto* will load all zones found,
+even though the directive might only receive queries for a specific zone. I.e.:
+
+~~~
+auto example.org {
+    directory /etc/coredns/zones
+}
+~~~
+will happily pick up a zone for `example.COM`, except that it will never be queried, because the
+*auto* directive is only authoritative for `example.ORG`.
+
+## Examples
+
+Load `org` domains from `/etc/coredns/zones/org` and allow transfers to the internet, but send
+notifies to 10.240.1.1:
+
+~~~
+auto org {
+    directory /etc/coredns/zones/org
+    transfer to *
+    transfer to 10.240.1.1
+}
+~~~
+
+Load `org` domains from `/etc/coredns/zones/org` and look for file names such as
+`www.db.example.org`, where `example.org` is the origin. Scan every 45 seconds:
+
+~~~
+auto org {
+    directory /etc/coredns/zones/org www\.db\.(.*) {1} 45
+}
+~~~
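To make the **REGEXP**/**ORIGIN_TEMPLATE** mechanism above concrete, here is a minimal, hypothetical sketch (not part of this change) of how a file name is turned into an origin with Go's regexp package; the plugin's own `matches` helper in walk.go below works along the same lines:

~~~
package main

import (
	"fmt"
	"regexp"

	"github.com/miekg/dns"
)

func main() {
	// The defaults from the README: REGEXP `db\.(.*)` and ORIGIN_TEMPLATE `{1}`,
	// which rewriteToExpand turns into the regexp template `${1}`.
	re := regexp.MustCompile(`db\.(.*)`)
	template := "${1}"

	base := "db.example.com"
	idx := re.FindStringSubmatchIndex(base)
	if idx == nil {
		return // file name does not match, skip it
	}
	// Expand the template with the submatches, then fully qualify the result.
	origin := dns.Fqdn(string(re.ExpandString(nil, template, base, idx)))
	fmt.Println(origin) // example.com.
}
~~~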
diff --git a/plugin/auto/auto.go b/plugin/auto/auto.go
new file mode 100644
index 000000000..e9cab1950
--- /dev/null
+++ b/plugin/auto/auto.go
@@ -0,0 +1,96 @@
+// Package auto implements an on-the-fly loading file backend.
+package auto
+
+import (
+	"regexp"
+	"time"
+
+	"github.com/coredns/coredns/plugin"
+	"github.com/coredns/coredns/plugin/file"
+	"github.com/coredns/coredns/plugin/metrics"
+	"github.com/coredns/coredns/plugin/proxy"
+	"github.com/coredns/coredns/request"
+
+	"github.com/miekg/dns"
+	"golang.org/x/net/context"
+)
+
+type (
+	// Auto holds the zones and the loader configuration for automatically loading zones.
+	Auto struct {
+		Next plugin.Handler
+		*Zones
+
+		metrics *metrics.Metrics
+		loader
+	}
+
+	loader struct {
+		directory string
+		template  string
+		re        *regexp.Regexp
+
+		// In the future this should be something like ZoneMeta that contains all this stuff.
+		transferTo []string
+		noReload   bool
+		proxy      proxy.Proxy // Proxy for looking up names during the resolution process
+
+		duration time.Duration
+	}
+)
+
+// ServeDNS implements the plugin.Handler interface.
+func (a Auto) ServeDNS(ctx context.Context, w dns.ResponseWriter, r *dns.Msg) (int, error) {
+	state := request.Request{W: w, Req: r}
+	qname := state.Name()
+
+	// TODO(miek): match the qname better in the map
+
+	// Precheck with the origins, i.e. are we allowed to look here?
+	zone := plugin.Zones(a.Zones.Origins()).Matches(qname)
+	if zone == "" {
+		return plugin.NextOrFailure(a.Name(), a.Next, ctx, w, r)
+	}
+
+	// Now the real zone.
+	zone = plugin.Zones(a.Zones.Names()).Matches(qname)
+
+	a.Zones.RLock()
+	z, ok := a.Zones.Z[zone]
+	a.Zones.RUnlock()
+
+	if !ok || z == nil {
+		return dns.RcodeServerFailure, nil
+	}
+
+	if state.QType() == dns.TypeAXFR || state.QType() == dns.TypeIXFR {
+		xfr := file.Xfr{Zone: z}
+		return xfr.ServeDNS(ctx, w, r)
+	}
+
+	answer, ns, extra, result := z.Lookup(state, qname)
+
+	m := new(dns.Msg)
+	m.SetReply(r)
+	m.Authoritative, m.RecursionAvailable, m.Compress = true, true, true
+	m.Answer, m.Ns, m.Extra = answer, ns, extra
+
+	switch result {
+	case file.Success:
+	case file.NoData:
+	case file.NameError:
+		m.Rcode = dns.RcodeNameError
+	case file.Delegation:
+		m.Authoritative = false
+	case file.ServerFailure:
+		return dns.RcodeServerFailure, nil
+	}
+
+	state.SizeAndDo(m)
+	m, _ = state.Scrub(m)
+	w.WriteMsg(m)
+	return dns.RcodeSuccess, nil
+}
+
+// Name implements the Handler interface.
+func (a Auto) Name() string { return "auto" }
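A quick illustration (hypothetical names, not part of this change) of the two-step match in ServeDNS above: plugin.Zones.Matches returns the most specific configured zone a query name falls under, or an empty string, which is why a miss on the origins falls through to the next plugin:

~~~
package main

import (
	"fmt"

	"github.com/coredns/coredns/plugin"
)

func main() {
	origins := []string{"example.org."}

	// A query inside the configured origin matches...
	fmt.Println(plugin.Zones(origins).Matches("www.example.org.")) // example.org.

	// ...while anything else yields "" and is handed to the next plugin.
	fmt.Println(plugin.Zones(origins).Matches("example.com.")) // ""
}
~~~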
diff --git a/plugin/auto/regexp.go b/plugin/auto/regexp.go
new file mode 100644
index 000000000..fa424ec7e
--- /dev/null
+++ b/plugin/auto/regexp.go
@@ -0,0 +1,20 @@
+package auto
+
+// rewriteToExpand rewrites our template string to one that we can give to regexp.ExpandString. This basically
+// involves prefixing any '{' with a '$'.
+func rewriteToExpand(s string) string {
+	// Pretty dumb at the moment: every '{' gets a '$' prefixed.
+	// Also wasteful, as we build the string with +=. This is OK-ish,
+	// as we only do this during config parsing.
+
+	copy := ""
+
+	for _, c := range s {
+		if c == '{' {
+			copy += "$"
+		}
+		copy += string(c)
+	}
+
+	return copy
+}
diff --git a/plugin/auto/regexp_test.go b/plugin/auto/regexp_test.go
new file mode 100644
index 000000000..17c35eb90
--- /dev/null
+++ b/plugin/auto/regexp_test.go
@@ -0,0 +1,20 @@
+package auto
+
+import "testing"
+
+func TestRewriteToExpand(t *testing.T) {
+	tests := []struct {
+		in       string
+		expected string
+	}{
+		{in: "", expected: ""},
+		{in: "{1}", expected: "${1}"},
+		{in: "{1", expected: "${1"},
+	}
+	for i, tc := range tests {
+		got := rewriteToExpand(tc.in)
+		if got != tc.expected {
+			t.Errorf("Test %d: Expected %v, but got %v", i, tc.expected, got)
+		}
+	}
+}
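The comment in regexp.go above notes that building the string with += is wasteful. A possible alternative, sketched here only as an illustration (hypothetical name, assumes Go 1.10+ for strings.Builder, not part of this change), would be:

~~~
package auto

import "strings"

// rewriteToExpandBuilder is a hypothetical variant of rewriteToExpand that
// avoids repeated string concatenation by using strings.Builder.
func rewriteToExpandBuilder(s string) string {
	var b strings.Builder
	b.Grow(len(s) + 1)
	for _, c := range s {
		if c == '{' {
			b.WriteByte('$')
		}
		b.WriteRune(c)
	}
	return b.String()
}
~~~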
diff --git a/plugin/auto/setup.go b/plugin/auto/setup.go
new file mode 100644
index 000000000..75966f8a0
--- /dev/null
+++ b/plugin/auto/setup.go
@@ -0,0 +1,172 @@
+package auto
+
+import (
+	"log"
+	"os"
+	"path"
+	"regexp"
+	"strconv"
+	"time"
+
+	"github.com/coredns/coredns/core/dnsserver"
+	"github.com/coredns/coredns/plugin"
+	"github.com/coredns/coredns/plugin/file"
+	"github.com/coredns/coredns/plugin/metrics"
+	"github.com/coredns/coredns/plugin/pkg/dnsutil"
+	"github.com/coredns/coredns/plugin/proxy"
+
+	"github.com/mholt/caddy"
+)
+
+func init() {
+	caddy.RegisterPlugin("auto", caddy.Plugin{
+		ServerType: "dns",
+		Action:     setup,
+	})
+}
+
+func setup(c *caddy.Controller) error {
+	a, err := autoParse(c)
+	if err != nil {
+		return plugin.Error("auto", err)
+	}
+
+	c.OnStartup(func() error {
+		m := dnsserver.GetConfig(c).Handler("prometheus")
+		if m == nil {
+			return nil
+		}
+		(&a).metrics = m.(*metrics.Metrics)
+		return nil
+	})
+
+	walkChan := make(chan bool)
+
+	c.OnStartup(func() error {
+		err := a.Walk()
+		if err != nil {
+			return err
+		}
+
+		go func() {
+			ticker := time.NewTicker(a.loader.duration)
+			for {
+				select {
+				case <-walkChan:
+					return
+				case <-ticker.C:
+					a.Walk()
+				}
+			}
+		}()
+		return nil
+	})
+
+	c.OnShutdown(func() error {
+		close(walkChan)
+		return nil
+	})
+
+	dnsserver.GetConfig(c).AddPlugin(func(next plugin.Handler) plugin.Handler {
+		a.Next = next
+		return a
+	})
+
+	return nil
+}
+
+func autoParse(c *caddy.Controller) (Auto, error) {
+	var a = Auto{
+		loader: loader{template: "${1}", re: regexp.MustCompile(`db\.(.*)`), duration: 60 * time.Second},
+		Zones:  &Zones{},
+	}
+
+	config := dnsserver.GetConfig(c)
+
+	for c.Next() {
+		// auto [ZONES...]
+		a.Zones.origins = make([]string, len(c.ServerBlockKeys))
+		copy(a.Zones.origins, c.ServerBlockKeys)
+
+		args := c.RemainingArgs()
+		if len(args) > 0 {
+			a.Zones.origins = args
+		}
+		for i := range a.Zones.origins {
+			a.Zones.origins[i] = plugin.Host(a.Zones.origins[i]).Normalize()
+		}
+
+		for c.NextBlock() {
+			switch c.Val() {
+			case "directory": // directory DIR [REGEXP [TEMPLATE] [DURATION]]
+				if !c.NextArg() {
+					return a, c.ArgErr()
+				}
+				a.loader.directory = c.Val()
+				if !path.IsAbs(a.loader.directory) && config.Root != "" {
+					a.loader.directory = path.Join(config.Root, a.loader.directory)
+				}
+				_, err := os.Stat(a.loader.directory)
+				if err != nil {
+					if os.IsNotExist(err) {
+						log.Printf("[WARNING] Directory does not exist: %s", a.loader.directory)
+					} else {
+						return a, c.Errf("Unable to access root path '%s': %v", a.loader.directory, err)
+					}
+				}
+
+				// regexp
+				if c.NextArg() {
+					a.loader.re, err = regexp.Compile(c.Val())
+					if err != nil {
+						return a, err
+					}
+					if a.loader.re.NumSubexp() == 0 {
+						return a, c.Errf("Need at least one sub expression")
+					}
+				}
+
+				// template
+				if c.NextArg() {
+					a.loader.template = rewriteToExpand(c.Val())
+				}
+
+				// duration
+				if c.NextArg() {
+					i, err := strconv.Atoi(c.Val())
+					if err != nil {
+						return a, err
+					}
+					if i < 1 {
+						i = 1
+					}
+					a.loader.duration = time.Duration(i) * time.Second
+				}
+
+			case "no_reload":
+				a.loader.noReload = true
+
+			case "upstream":
+				args := c.RemainingArgs()
+				if len(args) == 0 {
+					return a, c.ArgErr()
+				}
+				ups, err := dnsutil.ParseHostPortOrFile(args...)
+				if err != nil {
+					return a, err
+				}
+				a.loader.proxy = proxy.NewLookup(ups)
+
+			default:
+				t, _, e := file.TransferParse(c, false)
+				if e != nil {
+					return a, e
+				}
+				if t != nil {
+					a.loader.transferTo = append(a.loader.transferTo, t...)
+				}
+			}
+		}
+	}
+	return a, nil
+}
diff --git a/plugin/auto/setup_test.go b/plugin/auto/setup_test.go
new file mode 100644
index 000000000..9754551d2
--- /dev/null
+++ b/plugin/auto/setup_test.go
@@ -0,0 +1,125 @@
+package auto
+
+import (
+	"testing"
+
+	"github.com/mholt/caddy"
+)
+
+func TestAutoParse(t *testing.T) {
+	tests := []struct {
+		inputFileRules    string
+		shouldErr         bool
+		expectedDirectory string
+		expectedTempl     string
+		expectedRe        string
+		expectedTo        []string
+	}{
+		{
+			`auto example.org {
+				directory /tmp
+				transfer to 127.0.0.1
+			}`,
+			false, "/tmp", "${1}", `db\.(.*)`, []string{"127.0.0.1:53"},
+		},
+		{
+			`auto 10.0.0.0/24 {
+				directory /tmp
+			}`,
+			false, "/tmp", "${1}", `db\.(.*)`, nil,
+		},
+		{
+			`auto {
+				directory /tmp
+				no_reload
+			}`,
+			false, "/tmp", "${1}", `db\.(.*)`, nil,
+		},
+		{
+			`auto {
+				directory /tmp (.*) bliep
+			}`,
+			false, "/tmp", "bliep", `(.*)`, nil,
+		},
+		{
+			`auto {
+				directory /tmp (.*) bliep 10
+			}`,
+			false, "/tmp", "bliep", `(.*)`, nil,
+		},
+		{
+			`auto {
+				directory /tmp (.*) bliep
+				transfer to 127.0.0.1
+				transfer to 127.0.0.2
+				upstream 8.8.8.8
+			}`,
+			false, "/tmp", "bliep", `(.*)`, []string{"127.0.0.1:53", "127.0.0.2:53"},
+		},
+		// errors
+		{
+			`auto example.org {
+				directory
+			}`,
+			true, "", "${1}", `db\.(.*)`, nil,
+		},
+		{
+			`auto example.org {
+				directory /tmp * {1}
+			}`,
+			true, "", "${1}", ``, nil,
+		},
+		{
+			`auto example.org {
+				directory /tmp * {1} aa
+			}`,
+			true, "", "${1}", ``, nil,
+		},
+		{
+			`auto example.org {
+				directory /tmp .* {1}
+			}`,
+			true, "", "${1}", ``, nil,
+		},
+		{
+			`auto example.org {
+				directory /tmp .* {1}
+			}`,
+			true, "", "${1}", ``, nil,
+		},
+		{
+			`auto example.org {
+				directory /tmp .* {1}
+			}`,
+			true, "", "${1}", ``, nil,
+		},
+	}
+
+	for i, test := range tests {
+		c := caddy.NewTestController("dns", test.inputFileRules)
+		a, err := autoParse(c)
+
+		if err == nil && test.shouldErr {
+			t.Fatalf("Test %d expected errors, but got no error", i)
+		} else if err != nil && !test.shouldErr {
+			t.Fatalf("Test %d expected no errors, but got '%v'", i, err)
+		} else if !test.shouldErr {
+			if a.loader.directory != test.expectedDirectory {
+				t.Fatalf("Test %d expected %v, got %v", i, test.expectedDirectory, a.loader.directory)
+			}
+			if a.loader.template != test.expectedTempl {
+				t.Fatalf("Test %d expected %v, got %v", i, test.expectedTempl, a.loader.template)
+			}
+			if a.loader.re.String() != test.expectedRe {
+				t.Fatalf("Test %d expected %v, got %v", i, test.expectedRe, a.loader.re)
+			}
+			if test.expectedTo != nil {
+				for j, got := range a.loader.transferTo {
+					if got != test.expectedTo[j] {
+						t.Fatalf("Test %d expected %v, got %v", i, test.expectedTo[j], got)
+					}
+				}
+			}
+		}
+	}
+}
diff --git a/plugin/auto/walk.go b/plugin/auto/walk.go
new file mode 100644
index 000000000..a98f2318e
--- /dev/null
+++ b/plugin/auto/walk.go
@@ -0,0 +1,109 @@
+package auto
+
+import (
+	"log"
+	"os"
+	"path"
+	"path/filepath"
+	"regexp"
+
+	"github.com/coredns/coredns/plugin/file"
+
+	"github.com/miekg/dns"
+)
+
+// Walk recursively walks the files under l.directory and adds the ones that match l.re.
+func (a Auto) Walk() error {
+
+	// TODO(miek): should add something so that we don't stomp on each other.
+
+	toDelete := make(map[string]bool)
+	for _, n := range a.Zones.Names() {
+		toDelete[n] = true
+	}
+
+	filepath.Walk(a.loader.directory, func(path string, info os.FileInfo, err error) error {
+		if info == nil || info.IsDir() {
+			return nil
+		}
+
+		match, origin := matches(a.loader.re, info.Name(), a.loader.template)
+		if !match {
+			return nil
+		}
+
+		if _, ok := a.Zones.Z[origin]; ok {
+			// we already have this zone
+			toDelete[origin] = false
+			return nil
+		}
+
+		reader, err := os.Open(path)
+		if err != nil {
+			log.Printf("[WARNING] Opening %s failed: %s", path, err)
+			return nil
+		}
+		defer reader.Close()
+
+		// Serial for loading a zone is 0, because it is a new zone.
+		zo, err := file.Parse(reader, origin, path, 0)
+		if err != nil {
+			log.Printf("[WARNING] Parse zone `%s': %v", origin, err)
+			return nil
+		}
+
+		zo.NoReload = a.loader.noReload
+		zo.Proxy = a.loader.proxy
+		zo.TransferTo = a.loader.transferTo
+
+		a.Zones.Add(zo, origin)
+
+		if a.metrics != nil {
+			a.metrics.AddZone(origin)
+		}
+
+		zo.Notify()
+
+		log.Printf("[INFO] Inserting zone `%s' from: %s", origin, path)
+
+		toDelete[origin] = false
+
+		return nil
+	})
+
+	for origin, ok := range toDelete {
+		if !ok {
+			continue
+		}
+
+		if a.metrics != nil {
+			a.metrics.RemoveZone(origin)
+		}
+
+		a.Zones.Remove(origin)
+
+		log.Printf("[INFO] Deleting zone `%s'", origin)
+	}
+
+	return nil
+}
+
+// matches matches re against filename; if it is a match, the subexpressions are used to expand
+// template into an origin. When match is true that origin is returned. Origin is fully qualified.
+func matches(re *regexp.Regexp, filename, template string) (match bool, origin string) {
+	base := path.Base(filename)
+
+	matches := re.FindStringSubmatchIndex(base)
+	if matches == nil {
+		return false, ""
+	}
+
+	by := re.ExpandString(nil, template, base, matches)
+	if by == nil {
+		return false, ""
+	}
+
+	origin = dns.Fqdn(string(by))
+
+	return true, origin
+}
diff --git a/plugin/auto/walk_test.go b/plugin/auto/walk_test.go
new file mode 100644
index 000000000..29b9dbb55
--- /dev/null
+++ b/plugin/auto/walk_test.go
@@ -0,0 +1,94 @@
+package auto
+
+import (
+	"io/ioutil"
+	"log"
+	"os"
+	"path"
+	"regexp"
+	"testing"
+)
+
+var dbFiles = []string{"db.example.org", "aa.example.org"}
+
+const zoneContent = `; testzone
+@	IN	SOA	sns.dns.icann.org. noc.dns.icann.org. 2016082534 7200 3600 1209600 3600
+		NS	a.iana-servers.net.
+		NS	b.iana-servers.net.
+
+www	IN	A	127.0.0.1
+`
+
+func TestWalk(t *testing.T) {
+	log.SetOutput(ioutil.Discard)
+
+	tempdir, err := createFiles()
+	if err != nil {
+		if tempdir != "" {
+			os.RemoveAll(tempdir)
+		}
+		t.Fatal(err)
+	}
+	defer os.RemoveAll(tempdir)
+
+	ldr := loader{
+		directory: tempdir,
+		re:        regexp.MustCompile(`db\.(.*)`),
+		template:  `${1}`,
+	}
+
+	a := Auto{
+		loader: ldr,
+		Zones:  &Zones{},
+	}
+
+	a.Walk()
+
+	// db.example.org and db.example.com should be here (created in createFiles)
+	for _, name := range []string{"example.com.", "example.org."} {
+		if _, ok := a.Zones.Z[name]; !ok {
+			t.Errorf("%s should have been added", name)
+		}
+	}
+}
+
+func TestWalkNonExistent(t *testing.T) {
+	log.SetOutput(ioutil.Discard)
+
+	nonExistingDir := "highly_unlikely_to_exist_dir"
+
+	ldr := loader{
+		directory: nonExistingDir,
+		re:        regexp.MustCompile(`db\.(.*)`),
+		template:  `${1}`,
+	}
+
+	a := Auto{
+		loader: ldr,
+		Zones:  &Zones{},
+	}
+
+	a.Walk()
+}
+
+func createFiles() (string, error) {
+	dir, err := ioutil.TempDir(os.TempDir(), "coredns")
+	if err != nil {
+		return dir, err
+	}
+
+	for _, name := range dbFiles {
+		if err := ioutil.WriteFile(path.Join(dir, name), []byte(zoneContent), 0644); err != nil {
+			return dir, err
+		}
+	}
+	// symlinks
+	if err = os.Symlink(path.Join(dir, "db.example.org"), path.Join(dir, "db.example.com")); err != nil {
+		return dir, err
+	}
+	if err = os.Symlink(path.Join(dir, "db.example.org"), path.Join(dir, "aa.example.com")); err != nil {
+		return dir, err
+	}
+
+	return dir, nil
+}
diff --git a/plugin/auto/watcher_test.go b/plugin/auto/watcher_test.go
new file mode 100644
index 000000000..329d8dc85
--- /dev/null
+++ b/plugin/auto/watcher_test.go
@@ -0,0 +1,58 @@
+package auto
+
+import (
+	"io/ioutil"
+	"log"
+	"os"
+	"path"
+	"regexp"
+	"testing"
+)
+
+func TestWatcher(t *testing.T) {
+	log.SetOutput(ioutil.Discard)
+
+	tempdir, err := createFiles()
+	if err != nil {
+		if tempdir != "" {
+			os.RemoveAll(tempdir)
+		}
+		t.Fatal(err)
+	}
+	defer os.RemoveAll(tempdir)
+
+	ldr := loader{
+		directory: tempdir,
+		re:        regexp.MustCompile(`db\.(.*)`),
+		template:  `${1}`,
+	}
+
+	a := Auto{
+		loader: ldr,
+		Zones:  &Zones{},
+	}
+
+	a.Walk()
+
+	// example.org and example.com should exist
+	if x := len(a.Zones.Z["example.org."].All()); x != 4 {
+		t.Fatalf("Expected 4 RRs, got %d", x)
+	}
+	if x := len(a.Zones.Z["example.com."].All()); x != 4 {
+		t.Fatalf("Expected 4 RRs, got %d", x)
+	}
+
+	// Now remove one file, rescan and see if it's gone.
+	if err := os.Remove(path.Join(tempdir, "db.example.com")); err != nil {
+		t.Fatal(err)
+	}
+
+	a.Walk()
+
+	if _, ok := a.Zones.Z["example.com."]; ok {
+		t.Errorf("Expected %q to be gone.", "example.com.")
+	}
+	if _, ok := a.Zones.Z["example.org."]; !ok {
+		t.Errorf("Expected %q to still be there.", "example.org.")
+	}
+}
diff --git a/plugin/auto/zone.go b/plugin/auto/zone.go
new file mode 100644
index 000000000..e46f04e33
--- /dev/null
+++ b/plugin/auto/zone.go
@@ -0,0 +1,76 @@
+// Package auto implements an on-the-fly loading file backend.
+package auto
+
+import (
+	"sync"
+
+	"github.com/coredns/coredns/plugin/file"
+)
+
+// Zones maps zone names to a *Zone. It keeps track of which zones we have loaded at
+// any one time.
+type Zones struct {
+	Z     map[string]*file.Zone // A map mapping zone (origin) to the Zone's data.
+	names []string              // All the keys from the map Z as a string slice.
+
+	origins []string // Any origins from the server block.
+
+	sync.RWMutex
+}
+
+// Names returns the names from z.
+func (z *Zones) Names() []string {
+	z.RLock()
+	n := z.names
+	z.RUnlock()
+	return n
+}
+
+// Origins returns the origins from z.
+func (z *Zones) Origins() []string {
+	// doesn't need locking, because there aren't multiple goroutines accessing it.
+	return z.origins
+}
+
+// Zones returns the zone with origin name from z, or nil when not found.
+func (z *Zones) Zones(name string) *file.Zone {
+	z.RLock()
+	zo := z.Z[name]
+	z.RUnlock()
+	return zo
+}
+
+// Add adds a new zone into z. If zo.NoReload is false, the
+// reload goroutine is started.
+func (z *Zones) Add(zo *file.Zone, name string) {
+	z.Lock()
+
+	if z.Z == nil {
+		z.Z = make(map[string]*file.Zone)
+	}
+
+	z.Z[name] = zo
+	z.names = append(z.names, name)
+	zo.Reload()
+
+	z.Unlock()
+}
+
+// Remove removes the zone named name from z. It also stops the zone's reload goroutine.
+func (z *Zones) Remove(name string) {
+	z.Lock()
+
+	if zo, ok := z.Z[name]; ok && !zo.NoReload {
+		zo.ReloadShutdown <- true
+	}
+
+	delete(z.Z, name)
+
+	// TODO(miek): just regenerate names (might be bad if you have a lot of zones...)
+	z.names = []string{}
+	for n := range z.Z {
+		z.names = append(z.names, n)
+	}
+
+	z.Unlock()
+}
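For reference, a rough usage sketch of how the Zones helpers above fit together (the file name and origin are invented; file.NewZone is assumed to be the zone constructor from the *file* plugin, and NoReload is set so no reload goroutine is involved):

~~~
package main

import (
	"fmt"

	"github.com/coredns/coredns/plugin/auto"
	"github.com/coredns/coredns/plugin/file"
)

func main() {
	z := &auto.Zones{}

	// Pretend Walk just found db.example.org on disk.
	zo := file.NewZone("example.org.", "/etc/coredns/zones/db.example.org")
	zo.NoReload = true // sketch only: keep the reload goroutine out of the picture

	z.Add(zo, "example.org.")
	fmt.Println(z.Names()) // [example.org.]

	z.Remove("example.org.")
	fmt.Println(z.Names()) // []
}
~~~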