2018-01-13 23:52:44 +01:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
|
|
|
"encoding/csv"
|
2018-01-20 00:30:33 +01:00
|
|
|
"errors"
|
2018-01-13 23:52:44 +01:00
|
|
|
"fmt"
|
|
|
|
"io/ioutil"
|
|
|
|
"net/http"
|
|
|
|
"os"
|
|
|
|
"strings"
|
|
|
|
"time"
|
2018-01-25 15:02:18 +01:00
|
|
|
"unicode"
|
2018-01-13 23:52:44 +01:00
|
|
|
|
|
|
|
"github.com/dchest/safefile"
|
|
|
|
|
|
|
|
"github.com/jedisct1/dlog"
|
|
|
|
"github.com/jedisct1/go-minisign"
|
|
|
|
)
|
|
|
|
|
|
|
|
// SourceFormat identifies the layout of a remote server-source list.
type SourceFormat int
|
|
|
|
|
|
|
|
const (
	// SourceFormatV1 is the legacy CSV-based source format.
	SourceFormatV1 = iota
	// SourceFormatV2 is the newer format carrying sdns:// stamps
	// under "## name" headings.
	SourceFormatV2
)
|
|
|
|
|
2018-01-18 23:19:14 +01:00
|
|
|
const (
	// SourcesUpdateDelay is how long a cached source file is considered
	// fresh before it is re-downloaded.
	SourcesUpdateDelay = time.Duration(24) * time.Hour
)
|
|
|
|
|
2018-01-13 23:52:44 +01:00
|
|
|
// Source describes one remote list of server entries together with the
// raw payload that was fetched (or read from cache) for it.
type Source struct {
	url    string       // location the list is downloaded from
	format SourceFormat // which parser applies: SourceFormatV1 or SourceFormatV2
	in     string       // raw content of the list, as fetched/verified
}
|
|
|
|
|
2018-01-20 00:30:33 +01:00
|
|
|
func fetchFromCache(cacheFile string) (in string, delayTillNextUpdate time.Duration, err error) {
|
|
|
|
fi, err := os.Stat(cacheFile)
|
|
|
|
if err != nil {
|
|
|
|
delayTillNextUpdate = time.Duration(0)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
elapsed := time.Since(fi.ModTime())
|
|
|
|
if elapsed < SourcesUpdateDelay {
|
|
|
|
dlog.Debugf("Cache file [%s] is still fresh", cacheFile)
|
|
|
|
delayTillNextUpdate = SourcesUpdateDelay - elapsed
|
|
|
|
} else {
|
|
|
|
dlog.Debugf("Cache file [%s] needs to be refreshed", cacheFile)
|
|
|
|
delayTillNextUpdate = time.Duration(0)
|
|
|
|
}
|
|
|
|
var bin []byte
|
|
|
|
bin, err = ioutil.ReadFile(cacheFile)
|
|
|
|
if err != nil {
|
|
|
|
delayTillNextUpdate = time.Duration(0)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
in = string(bin)
|
|
|
|
return
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
|
|
|
|
2018-01-20 00:30:33 +01:00
|
|
|
func fetchWithCache(url string, cacheFile string) (in string, cached bool, delayTillNextUpdate time.Duration, err error) {
|
|
|
|
cached = false
|
|
|
|
in, delayTillNextUpdate, err = fetchFromCache(cacheFile)
|
2018-01-13 23:52:44 +01:00
|
|
|
if err == nil {
|
2018-01-20 00:30:33 +01:00
|
|
|
dlog.Debugf("Delay till next update: %v", delayTillNextUpdate)
|
|
|
|
cached = true
|
|
|
|
return
|
|
|
|
}
|
|
|
|
var resp *http.Response
|
|
|
|
dlog.Infof("Loading source information from URL [%s]", url)
|
|
|
|
resp, err = http.Get(url)
|
|
|
|
if err == nil && resp != nil && (resp.StatusCode < 200 || resp.StatusCode > 299) {
|
|
|
|
err = fmt.Errorf("Webserver returned code %d", resp.StatusCode)
|
|
|
|
return
|
|
|
|
} else if err != nil {
|
|
|
|
return
|
|
|
|
} else if resp == nil {
|
|
|
|
err = errors.New("Webserver returned an error")
|
|
|
|
return
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
2018-01-20 00:30:33 +01:00
|
|
|
var bin []byte
|
|
|
|
bin, err = ioutil.ReadAll(resp.Body)
|
|
|
|
resp.Body.Close()
|
|
|
|
if err != nil {
|
|
|
|
return
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
2018-01-18 23:31:14 +01:00
|
|
|
err = nil
|
2018-01-13 23:52:44 +01:00
|
|
|
in = string(bin)
|
2018-01-20 00:30:33 +01:00
|
|
|
delayTillNextUpdate = SourcesUpdateDelay
|
2018-01-13 23:52:44 +01:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// AtomicFileWrite writes data to file atomically: safefile writes to a
// temporary file first and renames it over the target, so readers never
// observe a partially-written file. The file is created with mode 0644.
func AtomicFileWrite(file string, data []byte) error {
	return safefile.WriteFile(file, data, 0644)
}
|
|
|
|
|
2018-01-18 23:19:14 +01:00
|
|
|
// URLToPrefetch schedules a background refresh of a source URL: the file
// its payload is cached in, and the earliest time the refresh should run.
type URLToPrefetch struct {
	url       string
	cacheFile string
	when      time.Time // next scheduled refresh time
}
|
|
|
|
|
|
|
|
func NewSource(url string, minisignKeyStr string, cacheFile string, formatStr string, refreshDelay time.Duration) (Source, []URLToPrefetch, error) {
|
2018-01-20 00:30:33 +01:00
|
|
|
_ = refreshDelay
|
2018-01-13 23:52:44 +01:00
|
|
|
source := Source{url: url}
|
2018-01-25 15:02:18 +01:00
|
|
|
if formatStr == "v1" {
|
|
|
|
source.format = SourceFormatV1
|
|
|
|
} else if formatStr == "v2" {
|
|
|
|
source.format = SourceFormatV2
|
|
|
|
} else {
|
2018-01-18 23:19:14 +01:00
|
|
|
return source, []URLToPrefetch{}, fmt.Errorf("Unsupported source format: [%s]", formatStr)
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
|
|
|
minisignKey, err := minisign.NewPublicKey(minisignKeyStr)
|
|
|
|
if err != nil {
|
2018-01-18 23:19:14 +01:00
|
|
|
return source, []URLToPrefetch{}, err
|
|
|
|
}
|
2018-01-20 00:30:33 +01:00
|
|
|
now := time.Now()
|
|
|
|
urlsToPrefetch := []URLToPrefetch{}
|
|
|
|
|
2018-01-18 23:19:14 +01:00
|
|
|
sigURL := url + ".minisig"
|
2018-01-20 00:30:33 +01:00
|
|
|
in, cached, delayTillNextUpdate, err := fetchWithCache(url, cacheFile)
|
|
|
|
urlsToPrefetch = append(urlsToPrefetch, URLToPrefetch{url: url, cacheFile: cacheFile, when: now.Add(delayTillNextUpdate)})
|
|
|
|
|
2018-01-13 23:52:44 +01:00
|
|
|
sigCacheFile := cacheFile + ".minisig"
|
2018-01-20 01:00:19 +01:00
|
|
|
sigStr, sigCached, sigDelayTillNextUpdate, sigErr := fetchWithCache(sigURL, sigCacheFile)
|
2018-01-20 00:30:33 +01:00
|
|
|
urlsToPrefetch = append(urlsToPrefetch, URLToPrefetch{url: sigURL, cacheFile: sigCacheFile, when: now.Add(sigDelayTillNextUpdate)})
|
|
|
|
|
2018-01-20 01:00:19 +01:00
|
|
|
if err != nil || sigErr != nil {
|
2018-01-25 15:02:18 +01:00
|
|
|
if err == nil {
|
|
|
|
err = sigErr
|
|
|
|
}
|
|
|
|
return source, urlsToPrefetch, err
|
2018-01-20 01:00:19 +01:00
|
|
|
}
|
|
|
|
|
2018-01-13 23:52:44 +01:00
|
|
|
signature, err := minisign.DecodeSignature(sigStr)
|
|
|
|
if err != nil {
|
2018-01-18 23:33:30 +01:00
|
|
|
os.Remove(cacheFile)
|
|
|
|
os.Remove(sigCacheFile)
|
2018-01-18 23:19:14 +01:00
|
|
|
return source, urlsToPrefetch, err
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
|
|
|
res, err := minisignKey.Verify([]byte(in), signature)
|
2018-01-18 14:28:05 +01:00
|
|
|
if err != nil || !res {
|
2018-01-18 23:33:30 +01:00
|
|
|
os.Remove(cacheFile)
|
|
|
|
os.Remove(sigCacheFile)
|
2018-01-18 23:19:14 +01:00
|
|
|
return source, urlsToPrefetch, err
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
2018-01-20 00:30:33 +01:00
|
|
|
if !cached {
|
2018-01-13 23:52:44 +01:00
|
|
|
if err = AtomicFileWrite(cacheFile, []byte(in)); err != nil {
|
2018-01-20 00:30:33 +01:00
|
|
|
dlog.Warnf("%s: %s", cacheFile, err)
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
|
|
|
}
|
2018-01-20 00:30:33 +01:00
|
|
|
if !sigCached {
|
2018-01-13 23:52:44 +01:00
|
|
|
if err = AtomicFileWrite(sigCacheFile, []byte(sigStr)); err != nil {
|
2018-01-20 00:30:33 +01:00
|
|
|
dlog.Warnf("%s: %s", sigCacheFile, err)
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
dlog.Noticef("Source [%s] loaded", url)
|
|
|
|
source.in = in
|
2018-01-18 23:19:14 +01:00
|
|
|
return source, urlsToPrefetch, nil
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
|
|
|
|
2018-01-20 16:59:40 +01:00
|
|
|
func (source *Source) Parse(prefix string) ([]RegisteredServer, error) {
|
2018-01-25 15:02:18 +01:00
|
|
|
if source.format == SourceFormatV1 {
|
|
|
|
return source.parseV1(prefix)
|
|
|
|
} else if source.format == SourceFormatV2 {
|
|
|
|
return source.parseV2(prefix)
|
|
|
|
}
|
|
|
|
dlog.Fatal("Unexpected source format")
|
|
|
|
return []RegisteredServer{}, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (source *Source) parseV1(prefix string) ([]RegisteredServer, error) {
|
2018-01-13 23:52:44 +01:00
|
|
|
var registeredServers []RegisteredServer
|
|
|
|
|
|
|
|
csvReader := csv.NewReader(strings.NewReader(source.in))
|
|
|
|
records, err := csvReader.ReadAll()
|
|
|
|
if err != nil {
|
|
|
|
return registeredServers, nil
|
|
|
|
}
|
2018-01-17 02:40:47 +01:00
|
|
|
for lineNo, record := range records {
|
2018-01-13 23:53:33 +01:00
|
|
|
if len(record) == 0 {
|
|
|
|
continue
|
|
|
|
}
|
2018-01-13 23:52:44 +01:00
|
|
|
if len(record) < 14 {
|
2018-01-17 09:44:03 +01:00
|
|
|
return registeredServers, fmt.Errorf("Parse error at line %d", 1+lineNo)
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
2018-01-17 02:40:47 +01:00
|
|
|
if lineNo == 0 {
|
2018-01-13 23:52:44 +01:00
|
|
|
continue
|
|
|
|
}
|
2018-01-20 16:59:40 +01:00
|
|
|
name := prefix + record[0]
|
2018-01-13 23:52:44 +01:00
|
|
|
serverAddrStr := record[10]
|
|
|
|
providerName := record[11]
|
|
|
|
serverPkStr := record[12]
|
2018-01-18 13:01:16 +01:00
|
|
|
props := ServerInformalProperties(0)
|
|
|
|
if strings.EqualFold(record[7], "yes") {
|
|
|
|
props |= ServerInformalPropertyDNSSEC
|
|
|
|
}
|
|
|
|
if strings.EqualFold(record[8], "yes") {
|
|
|
|
props |= ServerInformalPropertyNoLog
|
|
|
|
}
|
2018-01-26 20:38:31 +01:00
|
|
|
stamp, err := NewDNSCryptServerStampFromLegacy(serverAddrStr, serverPkStr, providerName, props)
|
2018-01-13 23:52:44 +01:00
|
|
|
if err != nil {
|
|
|
|
return registeredServers, err
|
|
|
|
}
|
|
|
|
registeredServer := RegisteredServer{
|
|
|
|
name: name, stamp: stamp,
|
|
|
|
}
|
2018-01-20 13:56:26 +01:00
|
|
|
dlog.Debugf("Registered [%s] with stamp [%s]", name, stamp.String())
|
2018-01-13 23:52:44 +01:00
|
|
|
registeredServers = append(registeredServers, registeredServer)
|
|
|
|
}
|
|
|
|
return registeredServers, nil
|
|
|
|
}
|
2018-01-18 23:19:14 +01:00
|
|
|
|
2018-01-25 15:02:18 +01:00
|
|
|
func (source *Source) parseV2(prefix string) ([]RegisteredServer, error) {
|
|
|
|
var registeredServers []RegisteredServer
|
|
|
|
in := string(source.in)
|
|
|
|
parts := strings.Split(in, "## ")
|
|
|
|
if len(parts) < 2 {
|
|
|
|
return registeredServers, fmt.Errorf("Invalid format for source at [%s]", source.url)
|
|
|
|
}
|
|
|
|
parts = parts[1:]
|
|
|
|
for _, part := range parts {
|
|
|
|
part = strings.TrimFunc(part, unicode.IsSpace)
|
|
|
|
subparts := strings.Split(part, "\n")
|
|
|
|
if len(subparts) < 2 {
|
|
|
|
return registeredServers, fmt.Errorf("Invalid format for source at [%s]", source.url)
|
|
|
|
}
|
|
|
|
name := strings.TrimFunc(subparts[0], unicode.IsSpace)
|
|
|
|
if len(name) == 0 {
|
|
|
|
return registeredServers, fmt.Errorf("Invalid format for source at [%s]", source.url)
|
|
|
|
}
|
|
|
|
var stampStr string
|
|
|
|
for _, subpart := range subparts {
|
|
|
|
subpart = strings.TrimFunc(subpart, unicode.IsSpace)
|
|
|
|
if strings.HasPrefix(subpart, "sdns://") {
|
|
|
|
stampStr = subpart
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if len(stampStr) < 8 {
|
|
|
|
return registeredServers, fmt.Errorf("Missing stamp for server [%s] in source from [%s]", name, source.url)
|
|
|
|
}
|
|
|
|
stamp, err := NewServerStampFromString(stampStr)
|
|
|
|
if err != nil {
|
|
|
|
return registeredServers, err
|
|
|
|
}
|
|
|
|
registeredServer := RegisteredServer{
|
|
|
|
name: name, stamp: stamp,
|
|
|
|
}
|
|
|
|
dlog.Debugf("Registered [%s] with stamp [%s]", name, stamp.String())
|
|
|
|
registeredServers = append(registeredServers, registeredServer)
|
|
|
|
}
|
|
|
|
return registeredServers, nil
|
|
|
|
}
|
|
|
|
|
2018-01-19 23:43:45 +01:00
|
|
|
func PrefetchSourceURL(urlToPrefetch *URLToPrefetch) error {
|
2018-01-20 00:30:33 +01:00
|
|
|
in, _, delayTillNextUpdate, err := fetchWithCache(urlToPrefetch.url, urlToPrefetch.cacheFile)
|
|
|
|
if err == nil {
|
|
|
|
AtomicFileWrite(urlToPrefetch.cacheFile, []byte(in))
|
2018-01-18 23:19:14 +01:00
|
|
|
}
|
2018-01-20 00:30:33 +01:00
|
|
|
urlToPrefetch.when = time.Now().Add(delayTillNextUpdate)
|
|
|
|
return err
|
2018-01-18 23:19:14 +01:00
|
|
|
}
|