2018-01-13 23:52:44 +01:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
|
|
|
"fmt"
|
2018-02-04 11:31:54 +01:00
|
|
|
"io"
|
2018-01-13 23:52:44 +01:00
|
|
|
"io/ioutil"
|
|
|
|
"net/http"
|
2018-01-30 15:46:21 +01:00
|
|
|
"net/url"
|
2018-01-13 23:52:44 +01:00
|
|
|
"os"
|
2018-03-05 11:58:31 +01:00
|
|
|
"path/filepath"
|
2018-01-13 23:52:44 +01:00
|
|
|
"strings"
|
|
|
|
"time"
|
2018-01-25 15:02:18 +01:00
|
|
|
"unicode"
|
2018-01-13 23:52:44 +01:00
|
|
|
|
|
|
|
"github.com/dchest/safefile"
|
|
|
|
|
|
|
|
"github.com/jedisct1/dlog"
|
2018-04-18 18:58:39 +02:00
|
|
|
stamps "github.com/jedisct1/go-dnsstamps"
|
2018-01-13 23:52:44 +01:00
|
|
|
"github.com/jedisct1/go-minisign"
|
|
|
|
)
|
|
|
|
|
|
|
|
// SourceFormat identifies the layout of a server list source; see the
// SourceFormat* constants below.
type SourceFormat int
|
|
|
|
|
|
|
|
const (
	// SourceFormatV2 identifies the "v2" source list format, the only
	// format currently supported (see Source.parseV2).
	SourceFormatV2 = iota
)
|
|
|
|
|
2018-01-18 23:19:14 +01:00
|
|
|
const (
	// DefaultPrefetchDelay is the standard interval between refreshes of a
	// cached source.
	DefaultPrefetchDelay time.Duration = 24 * time.Hour
	// MinimumPrefetchInterval is the shortest allowed delay between two
	// consecutive prefetch passes.
	MinimumPrefetchInterval time.Duration = 10 * time.Minute
)
|
|
|
|
|
2018-01-13 23:52:44 +01:00
|
|
|
// Source represents a remote list of server stamps, together with the local
// cache files and the minisign public key used to fetch and verify it.
type Source struct {
	// urls are the candidate download locations, tried in order.
	urls []string
	// prefetch lists the URLs (content and signature) scheduled for refresh.
	prefetch []*URLToPrefetch
	// format selects the parser; only SourceFormatV2 is currently defined.
	format SourceFormat
	// in holds the raw source content after signature verification.
	in []byte
	// minisignKey verifies the ".minisig" signature accompanying the content.
	minisignKey *minisign.PublicKey
}
|
|
|
|
|
2019-10-30 06:31:28 +01:00
|
|
|
func (source *Source) checkSignature(bin, sig []byte) (err error) {
|
2019-10-30 03:00:49 +01:00
|
|
|
var signature minisign.Signature
|
2019-10-30 06:31:28 +01:00
|
|
|
if signature, err = minisign.DecodeSignature(string(sig)); err == nil {
|
|
|
|
_, err = source.minisignKey.Verify(bin, signature)
|
2019-10-30 03:00:49 +01:00
|
|
|
}
|
|
|
|
return
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
|
|
|
|
2019-11-03 07:34:59 +01:00
|
|
|
// timeNow returns the current time. It is a package-level indirection so
// tests can substitute a fixed clock instead of time.Now.
var timeNow = time.Now
|
|
|
|
|
2019-10-31 00:40:06 +01:00
|
|
|
func fetchFromCache(cacheFile string, refreshDelay time.Duration) (bin []byte, delayTillNextUpdate time.Duration, err error) {
|
2019-10-30 05:24:59 +01:00
|
|
|
delayTillNextUpdate = time.Duration(0)
|
2019-10-31 05:32:21 +01:00
|
|
|
if refreshDelay < DefaultPrefetchDelay {
|
|
|
|
refreshDelay = DefaultPrefetchDelay
|
2019-06-13 11:24:15 +02:00
|
|
|
}
|
2019-10-31 00:40:06 +01:00
|
|
|
var fi os.FileInfo
|
|
|
|
if fi, err = os.Stat(cacheFile); err != nil {
|
2018-01-20 00:30:33 +01:00
|
|
|
return
|
|
|
|
}
|
2019-10-30 06:31:28 +01:00
|
|
|
if bin, err = ioutil.ReadFile(cacheFile); err != nil {
|
2018-01-20 00:30:33 +01:00
|
|
|
return
|
|
|
|
}
|
2019-10-31 00:40:06 +01:00
|
|
|
if elapsed := timeNow().Sub(fi.ModTime()); elapsed < refreshDelay {
|
2019-10-30 05:24:59 +01:00
|
|
|
dlog.Debugf("Cache file [%s] is still fresh", cacheFile)
|
2019-10-31 05:32:21 +01:00
|
|
|
delayTillNextUpdate = DefaultPrefetchDelay - elapsed
|
2019-10-30 05:24:59 +01:00
|
|
|
} else {
|
|
|
|
dlog.Debugf("Cache file [%s] needs to be refreshed", cacheFile)
|
2018-02-19 19:24:51 +01:00
|
|
|
}
|
2018-01-20 00:30:33 +01:00
|
|
|
return
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
|
|
|
|
2019-10-30 05:12:50 +01:00
|
|
|
func fetchFromURL(xTransport *XTransport, u *url.URL) (bin []byte, err error) {
|
|
|
|
var resp *http.Response
|
|
|
|
if resp, _, err = xTransport.Get(u, "", DefaultTimeout); err == nil {
|
|
|
|
bin, err = ioutil.ReadAll(io.LimitReader(resp.Body, MaxHTTPBodyLength))
|
|
|
|
resp.Body.Close()
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2019-10-30 06:31:28 +01:00
|
|
|
func fetchWithCache(xTransport *XTransport, urlStr string, cacheFile string, refreshDelay time.Duration) (bin []byte, delayTillNextUpdate time.Duration, err error) {
|
2019-10-31 00:40:06 +01:00
|
|
|
if bin, delayTillNextUpdate, err = fetchFromCache(cacheFile, refreshDelay); err != nil {
|
|
|
|
if len(urlStr) == 0 {
|
|
|
|
dlog.Errorf("Cache file [%s] not present and no URL given to retrieve it", cacheFile)
|
|
|
|
return
|
2018-02-19 19:24:51 +01:00
|
|
|
}
|
2019-10-31 00:40:06 +01:00
|
|
|
dlog.Debugf("Cache file [%s] not present", cacheFile)
|
|
|
|
}
|
|
|
|
if err == nil && delayTillNextUpdate > 0 {
|
|
|
|
dlog.Debugf("Delay till next update: %v", delayTillNextUpdate)
|
2018-01-31 14:23:44 +01:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2018-01-30 15:46:21 +01:00
|
|
|
dlog.Infof("Loading source information from URL [%s]", urlStr)
|
|
|
|
|
2019-10-30 05:12:50 +01:00
|
|
|
var u *url.URL
|
|
|
|
if u, err = url.Parse(urlStr); err != nil {
|
2018-01-20 00:30:33 +01:00
|
|
|
return
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
2019-10-30 05:12:50 +01:00
|
|
|
if bin, err = fetchFromURL(xTransport, u); err != nil {
|
2018-01-20 00:30:33 +01:00
|
|
|
return
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
2019-10-30 05:45:04 +01:00
|
|
|
if err = AtomicFileWrite(cacheFile, bin); err != nil {
|
|
|
|
if absPath, err2 := filepath.Abs(cacheFile); err2 == nil {
|
|
|
|
dlog.Warnf("%s: %s", absPath, err)
|
|
|
|
}
|
|
|
|
}
|
2019-10-31 05:32:21 +01:00
|
|
|
delayTillNextUpdate = DefaultPrefetchDelay
|
2018-01-13 23:52:44 +01:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
func AtomicFileWrite(file string, data []byte) error {
|
|
|
|
return safefile.WriteFile(file, data, 0644)
|
|
|
|
}
|
|
|
|
|
2018-01-18 23:19:14 +01:00
|
|
|
// URLToPrefetch associates a remote URL with its local cache file and the
// time at which the next refresh attempt should happen.
type URLToPrefetch struct {
	// url is the remote location to download from.
	url string
	// cacheFile is the local path where the downloaded content is stored.
	cacheFile string
	// when is the scheduled time of the next refresh attempt.
	when time.Time
}
|
|
|
|
|
2019-10-31 02:22:48 +01:00
|
|
|
// NewSource loads a server list source (from cache or by downloading it)
// together with its ".minisig" signature, verifies the signature against
// minisignKeyStr, and schedules both URLs for periodic prefetching.
// Only the "v2" format is accepted. On failure, a non-nil *Source is still
// returned alongside the error.
func NewSource(xTransport *XTransport, urls []string, minisignKeyStr string, cacheFile string, formatStr string, refreshDelay time.Duration) (source *Source, err error) {
	source = &Source{urls: urls}
	if formatStr == "v2" {
		source.format = SourceFormatV2
	} else {
		return source, fmt.Errorf("Unsupported source format: [%s]", formatStr)
	}
	// The named return err is intentionally shadowed here; failures are
	// returned explicitly from this branch.
	if minisignKey, err := minisign.NewPublicKey(minisignKeyStr); err == nil {
		source.minisignKey = &minisignKey
	} else {
		return source, err
	}
	now := timeNow()
	sigCacheFile := cacheFile + ".minisig"
	source.prefetch = []*URLToPrefetch{}

	var bin, sig []byte
	var delayTillNextUpdate, sigDelayTillNextUpdate time.Duration
	var sigErr error
	var preloadURL string
	if len(urls) <= 0 {
		// No URLs configured: rely exclusively on the local cache files.
		bin, delayTillNextUpdate, err = fetchWithCache(xTransport, "", cacheFile, refreshDelay)
		sig, sigDelayTillNextUpdate, sigErr = fetchWithCache(xTransport, "", sigCacheFile, refreshDelay)
	} else {
		preloadURL = urls[0]
		// Try each URL in order until both the content and its signature load.
		for _, url := range urls {
			sigURL := url + ".minisig"
			bin, delayTillNextUpdate, err = fetchWithCache(xTransport, url, cacheFile, refreshDelay)
			sig, sigDelayTillNextUpdate, sigErr = fetchWithCache(xTransport, sigURL, sigCacheFile, refreshDelay)
			if err == nil && sigErr == nil {
				preloadURL = url
				break
			}
			dlog.Infof("Loading from [%s] failed", url)
		}
	}
	if len(preloadURL) > 0 {
		url := preloadURL
		sigURL := url + ".minisig"
		// Schedule both the content and its signature for future refreshes.
		source.prefetch = append(source.prefetch, &URLToPrefetch{url: url, cacheFile: cacheFile, when: now.Add(delayTillNextUpdate)})
		source.prefetch = append(source.prefetch, &URLToPrefetch{url: sigURL, cacheFile: sigCacheFile, when: now.Add(sigDelayTillNextUpdate)})
	}
	// A signature fetch failure is only surfaced when the content fetch
	// itself succeeded; otherwise the content error takes precedence.
	if sigErr != nil && err == nil {
		err = sigErr
	}
	if err != nil {
		return
	}

	if err = source.checkSignature(bin, sig); err != nil {
		return
	}
	dlog.Noticef("Source [%s] loaded", cacheFile)
	source.in = bin
	return
}
|
|
|
|
|
2019-10-31 02:22:48 +01:00
|
|
|
func PrefetchSources(xTransport *XTransport, sources []*Source) time.Duration {
|
|
|
|
now := timeNow()
|
2019-10-31 05:32:21 +01:00
|
|
|
interval := MinimumPrefetchInterval
|
2019-10-31 02:22:48 +01:00
|
|
|
for _, source := range sources {
|
|
|
|
for _, urlToPrefetch := range source.prefetch {
|
|
|
|
if now.After(urlToPrefetch.when) {
|
|
|
|
dlog.Debugf("Prefetching [%s]", urlToPrefetch.url)
|
|
|
|
if err := PrefetchSourceURL(xTransport, urlToPrefetch); err != nil {
|
|
|
|
dlog.Debugf("Prefetching [%s] failed: %s", urlToPrefetch.url, err)
|
|
|
|
} else {
|
|
|
|
dlog.Debugf("Prefetching [%s] succeeded. Next refresh scheduled for %v", urlToPrefetch.url, urlToPrefetch.when)
|
2019-10-31 05:32:21 +01:00
|
|
|
delay := urlToPrefetch.when.Sub(now)
|
|
|
|
if delay >= MinimumPrefetchInterval && (interval == MinimumPrefetchInterval || interval > delay) {
|
|
|
|
interval = delay
|
|
|
|
}
|
2019-10-31 02:22:48 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2019-10-31 05:32:21 +01:00
|
|
|
return interval
|
2019-10-31 02:22:48 +01:00
|
|
|
}
|
|
|
|
|
2018-01-20 16:59:40 +01:00
|
|
|
func (source *Source) Parse(prefix string) ([]RegisteredServer, error) {
|
2018-04-18 19:06:50 +02:00
|
|
|
if source.format == SourceFormatV2 {
|
2018-01-25 15:02:18 +01:00
|
|
|
return source.parseV2(prefix)
|
|
|
|
}
|
|
|
|
dlog.Fatal("Unexpected source format")
|
|
|
|
return []RegisteredServer{}, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// parseV2 parses the "v2" source list format: entries are introduced by a
// "## " header whose first line is the server name, followed by free-form
// description lines and exactly one "sdns:" stamp. prefix is prepended to
// every server name. Per-entry stamp problems are logged and collected
// rather than aborting the parse; they are joined into a single error
// returned alongside the servers that did parse successfully.
func (source *Source) parseV2(prefix string) ([]RegisteredServer, error) {
	var registeredServers []RegisteredServer
	var stampErrs []string
	// appendStampErr records a per-entry error without stopping the parse.
	appendStampErr := func(format string, a ...interface{}) {
		stampErr := fmt.Sprintf(format, a...)
		stampErrs = append(stampErrs, stampErr)
		dlog.Warn(stampErr)
	}
	in := string(source.in)
	parts := strings.Split(in, "## ")
	if len(parts) < 2 {
		return registeredServers, fmt.Errorf("Invalid format for source at [%v]", source.urls)
	}
	// parts[0] is whatever precedes the first "## " header; skip it.
	parts = parts[1:]
PartsLoop:
	for _, part := range parts {
		part = strings.TrimFunc(part, unicode.IsSpace)
		subparts := strings.Split(part, "\n")
		if len(subparts) < 2 {
			return registeredServers, fmt.Errorf("Invalid format for source at [%v]", source.urls)
		}
		// The first line of an entry is the server name.
		name := strings.TrimFunc(subparts[0], unicode.IsSpace)
		if len(name) == 0 {
			return registeredServers, fmt.Errorf("Invalid format for source at [%v]", source.urls)
		}
		subparts = subparts[1:]
		name = prefix + name
		var stampStr, description string
		for _, subpart := range subparts {
			subpart = strings.TrimFunc(subpart, unicode.IsSpace)
			if strings.HasPrefix(subpart, "sdns:") {
				// At most one stamp per entry; a duplicate invalidates it.
				if len(stampStr) > 0 {
					appendStampErr("Multiple stamps for server [%s]", name)
					continue PartsLoop
				}
				stampStr = subpart
				continue
			} else if len(subpart) == 0 || strings.HasPrefix(subpart, "//") {
				// Skip blank lines and comment lines.
				continue
			}
			// All remaining lines accumulate into a multi-line description.
			if len(description) > 0 {
				description += "\n"
			}
			description += subpart
		}
		// Require "sdns:" plus at least one payload character.
		if len(stampStr) < 6 {
			appendStampErr("Missing stamp for server [%s]", name)
			continue
		}
		stamp, err := stamps.NewServerStampFromString(stampStr)
		if err != nil {
			appendStampErr("Invalid or unsupported stamp [%v]: %s", stampStr, err.Error())
			continue
		}
		registeredServer := RegisteredServer{
			name: name, stamp: stamp, description: description,
		}
		dlog.Debugf("Registered [%s] with stamp [%s]", name, stamp.String())
		registeredServers = append(registeredServers, registeredServer)
	}
	if len(stampErrs) > 0 {
		return registeredServers, fmt.Errorf("%s", strings.Join(stampErrs, ", "))
	}
	return registeredServers, nil
}
|
|
|
|
|
2018-01-30 15:46:21 +01:00
|
|
|
func PrefetchSourceURL(xTransport *XTransport, urlToPrefetch *URLToPrefetch) error {
|
2019-10-31 05:32:21 +01:00
|
|
|
_, delayTillNextUpdate, err := fetchWithCache(xTransport, urlToPrefetch.url, urlToPrefetch.cacheFile, DefaultPrefetchDelay)
|
2019-11-03 07:34:59 +01:00
|
|
|
urlToPrefetch.when = timeNow().Add(delayTillNextUpdate)
|
2018-01-20 00:30:33 +01:00
|
|
|
return err
|
2018-01-18 23:19:14 +01:00
|
|
|
}
|