dnscrypt-proxy/dnscrypt-proxy/sources.go


package main

import (
	"encoding/csv"
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"net/url"
	"os"
	"path/filepath"
	"strings"
	"time"
	"unicode"

	"github.com/dchest/safefile"
	"github.com/jedisct1/dlog"
	stamps "github.com/jedisct1/go-dnsstamps"
	"github.com/jedisct1/go-minisign"
)

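// SourceFormat identifies the layout of a resolver source: v1 is the legacy
// CSV resolver list, v2 is the sectioned format carrying sdns:// stamps.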
type SourceFormat int

const (
	SourceFormatV1 = iota
	SourceFormatV2
)

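// SourcesUpdateDelay is how long a cached source is considered fresh before
// it is re-downloaded.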
const (
	SourcesUpdateDelay = time.Duration(24) * time.Hour
)

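// Source is a list of resolvers, fetched from a set of URLs or from the local
// cache, kept in raw form until it is parsed.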
type Source struct {
	urls   []string
	format SourceFormat
	in     string
}

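// fetchFromCache reads a previously downloaded source from cacheFile and
// reports whether it is older than SourcesUpdateDelay, along with the delay
// until the next refresh is due.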
func fetchFromCache(cacheFile string) (in string, expired bool, delayTillNextUpdate time.Duration, err error) {
	expired = false
	fi, err := os.Stat(cacheFile)
	if err != nil {
		dlog.Debugf("Cache file [%s] not present", cacheFile)
		delayTillNextUpdate = time.Duration(0)
		return
	}
	elapsed := time.Since(fi.ModTime())
	if elapsed < SourcesUpdateDelay {
		dlog.Debugf("Cache file [%s] is still fresh", cacheFile)
		delayTillNextUpdate = SourcesUpdateDelay - elapsed
	} else {
		dlog.Debugf("Cache file [%s] needs to be refreshed", cacheFile)
		delayTillNextUpdate = time.Duration(0)
	}
	var bin []byte
	bin, err = ioutil.ReadFile(cacheFile)
	if err != nil {
		delayTillNextUpdate = time.Duration(0)
		return
	}
	in = string(bin)
	if delayTillNextUpdate <= time.Duration(0) {
		expired = true
	}
	return
}

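// fetchWithCache returns the content of urlStr, preferring a still-fresh copy
// from cacheFile. On a cache miss or an expired cache it downloads the source
// again; cached reports whether the returned data came from the cache rather
// than from the network.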
func fetchWithCache(xTransport *XTransport, urlStr string, cacheFile string) (in string, cached bool, delayTillNextUpdate time.Duration, err error) {
	cached = false
	expired := false
	in, expired, delayTillNextUpdate, err = fetchFromCache(cacheFile)
	if err == nil && !expired {
		dlog.Debugf("Delay till next update: %v", delayTillNextUpdate)
		cached = true
		return
	}
	if expired {
		cached = true
	}
	if len(urlStr) == 0 {
		if !expired {
			err = fmt.Errorf("Cache file [%s] not present and no URL given to retrieve it", cacheFile)
		}
		return
	}
	var resp *http.Response
	dlog.Infof("Loading source information from URL [%s]", urlStr)
	url, err := url.Parse(urlStr)
	if err != nil {
		return
	}
	resp, _, err = xTransport.Get(url, "", 30*time.Second)
	if err == nil && resp != nil && (resp.StatusCode < 200 || resp.StatusCode > 299) {
		err = fmt.Errorf("Webserver returned code %d", resp.StatusCode)
		return
	} else if err != nil {
		return
	} else if resp == nil {
		err = errors.New("Webserver returned an error")
		return
	}
	var bin []byte
	bin, err = ioutil.ReadAll(io.LimitReader(resp.Body, MaxHTTPBodyLength))
	resp.Body.Close()
	if err != nil {
		return
	}
	err = nil
	cached = false
	in = string(bin)
	delayTillNextUpdate = SourcesUpdateDelay
	return
}

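// AtomicFileWrite writes data through safefile, which stages the content in a
// temporary file and renames it into place, so a partially written cache file
// is never observed.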
func AtomicFileWrite(file string, data []byte) error {
	return safefile.WriteFile(file, data, 0644)
}

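// URLToPrefetch is a source URL scheduled to be re-downloaded into cacheFile
// at time `when`.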
type URLToPrefetch struct {
	url       string
	cacheFile string
	when      time.Time
}

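// NewSource fetches a resolver source and its ".minisig" companion, either
// from the first URL that yields both or from the local cache, verifies the
// Minisign signature against minisignKeyStr, writes the verified data back to
// cacheFile, and returns the Source together with the URLs to refresh once
// the cache expires.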
func NewSource(xTransport *XTransport, urls []string, minisignKeyStr string, cacheFile string, formatStr string, refreshDelay time.Duration) (Source, []URLToPrefetch, error) {
	_ = refreshDelay
	source := Source{urls: urls}
	if formatStr == "v1" {
		source.format = SourceFormatV1
	} else if formatStr == "v2" {
		source.format = SourceFormatV2
	} else {
		return source, []URLToPrefetch{}, fmt.Errorf("Unsupported source format: [%s]", formatStr)
	}
	minisignKey, err := minisign.NewPublicKey(minisignKeyStr)
	if err != nil {
		return source, []URLToPrefetch{}, err
	}
	now := time.Now()
	urlsToPrefetch := []URLToPrefetch{}
	sigCacheFile := cacheFile + ".minisig"
	var sigStr, in string
	var cached, sigCached bool
	var delayTillNextUpdate, sigDelayTillNextUpdate time.Duration
	var sigErr error
	var preloadURL string
	if len(urls) <= 0 {
		in, cached, delayTillNextUpdate, err = fetchWithCache(xTransport, "", cacheFile)
		sigStr, sigCached, sigDelayTillNextUpdate, sigErr = fetchWithCache(xTransport, "", sigCacheFile)
	} else {
		preloadURL = urls[0]
		for _, url := range urls {
			sigURL := url + ".minisig"
			in, cached, delayTillNextUpdate, err = fetchWithCache(xTransport, url, cacheFile)
			sigStr, sigCached, sigDelayTillNextUpdate, sigErr = fetchWithCache(xTransport, sigURL, sigCacheFile)
			if err == nil && sigErr == nil {
				preloadURL = url
				break
			}
			dlog.Infof("Loading from [%s] failed", url)
		}
	}
	if len(preloadURL) > 0 {
		url := preloadURL
		sigURL := url + ".minisig"
		urlsToPrefetch = append(urlsToPrefetch, URLToPrefetch{url: url, cacheFile: cacheFile, when: now.Add(delayTillNextUpdate)})
		urlsToPrefetch = append(urlsToPrefetch, URLToPrefetch{url: sigURL, cacheFile: sigCacheFile, when: now.Add(sigDelayTillNextUpdate)})
	}
	if sigErr != nil && err == nil {
		err = sigErr
	}
	if err != nil {
		return source, urlsToPrefetch, err
	}
	signature, err := minisign.DecodeSignature(sigStr)
	if err != nil {
		os.Remove(cacheFile)
		os.Remove(sigCacheFile)
		return source, urlsToPrefetch, err
	}
	res, err := minisignKey.Verify([]byte(in), signature)
	if err != nil || !res {
		os.Remove(cacheFile)
		os.Remove(sigCacheFile)
		return source, urlsToPrefetch, err
	}
	if !cached {
		if err = AtomicFileWrite(cacheFile, []byte(in)); err != nil {
			if absPath, err2 := filepath.Abs(cacheFile); err2 == nil {
				dlog.Warnf("%s: %s", absPath, err)
			}
		}
	}
	if !sigCached {
		if err = AtomicFileWrite(sigCacheFile, []byte(sigStr)); err != nil {
			if absPath, err2 := filepath.Abs(sigCacheFile); err2 == nil {
				dlog.Warnf("%s: %s", absPath, err)
			}
		}
	}
	dlog.Noticef("Source [%s] loaded", cacheFile)
	source.in = in
	return source, urlsToPrefetch, nil
}

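// Parse decodes the raw source into registered servers, dispatching on the
// source format; each server name is prefixed with prefix.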
func (source *Source) Parse(prefix string) ([]RegisteredServer, error) {
	if source.format == SourceFormatV1 {
		return source.parseV1(prefix)
	} else if source.format == SourceFormatV2 {
		return source.parseV2(prefix)
	}
	dlog.Fatal("Unexpected source format")
	return []RegisteredServer{}, nil
}

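// parseV1 decodes the legacy CSV resolver list and converts each record into
// a DNSCrypt server stamp.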
func (source *Source) parseV1(prefix string) ([]RegisteredServer, error) {
	var registeredServers []RegisteredServer
	csvReader := csv.NewReader(strings.NewReader(source.in))
	records, err := csvReader.ReadAll()
	if err != nil {
		return registeredServers, err
	}
	for lineNo, record := range records {
		if len(record) == 0 {
			continue
		}
		if len(record) < 14 {
			return registeredServers, fmt.Errorf("Parse error at line %d", 1+lineNo)
		}
		if lineNo == 0 {
			continue
		}
		name := prefix + record[0]
		description := record[2]
		serverAddrStr := record[10]
		providerName := record[11]
		serverPkStr := record[12]
		props := stamps.ServerInformalProperties(0)
		if strings.EqualFold(record[7], "yes") {
			props |= stamps.ServerInformalPropertyDNSSEC
		}
		if strings.EqualFold(record[8], "yes") {
			props |= stamps.ServerInformalPropertyNoLog
		}
		stamp, err := stamps.NewDNSCryptServerStampFromLegacy(serverAddrStr, serverPkStr, providerName, props)
		if err != nil {
			return registeredServers, err
		}
		registeredServer := RegisteredServer{
			name: name, stamp: stamp, description: description,
		}
		dlog.Debugf("Registered [%s] with stamp [%s]", name, stamp.String())
		registeredServers = append(registeredServers, registeredServer)
	}
	return registeredServers, nil
}

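// parseV2 decodes the sectioned v2 resolver list: each "## name" section
// contains free-form description lines and exactly one sdns:// stamp.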
func (source *Source) parseV2(prefix string) ([]RegisteredServer, error) {
	var registeredServers []RegisteredServer
	in := string(source.in)
	parts := strings.Split(in, "## ")
	if len(parts) < 2 {
		return registeredServers, fmt.Errorf("Invalid format for source at [%v]", source.urls)
	}
	parts = parts[1:]
	for _, part := range parts {
		part = strings.TrimFunc(part, unicode.IsSpace)
		subparts := strings.Split(part, "\n")
		if len(subparts) < 2 {
			return registeredServers, fmt.Errorf("Invalid format for source at [%v]", source.urls)
		}
		name := strings.TrimFunc(subparts[0], unicode.IsSpace)
		if len(name) == 0 {
			return registeredServers, fmt.Errorf("Invalid format for source at [%v]", source.urls)
		}
		subparts = subparts[1:]
		name = prefix + name
		var stampStr, description string
		for _, subpart := range subparts {
			subpart = strings.TrimFunc(subpart, unicode.IsSpace)
			if strings.HasPrefix(subpart, "sdns://") {
				if len(stampStr) > 0 {
					return registeredServers, fmt.Errorf("Multiple stamps for server [%s] in source from [%v]", name, source.urls)
				}
				stampStr = subpart
				continue
			} else if len(subpart) == 0 || strings.HasPrefix(subpart, "//") {
				continue
			}
			if len(description) > 0 {
				description += "\n"
			}
			description += subpart
		}
		if len(stampStr) < 8 {
			return registeredServers, fmt.Errorf("Missing stamp for server [%s] in source from [%v]", name, source.urls)
		}
		stamp, err := stamps.NewServerStampFromString(stampStr)
		if err != nil {
			return registeredServers, err
		}
		registeredServer := RegisteredServer{
			name: name, stamp: stamp, description: description,
		}
		dlog.Debugf("Registered [%s] with stamp [%s]", name, stamp.String())
		registeredServers = append(registeredServers, registeredServer)
	}
	return registeredServers, nil
}

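// PrefetchSourceURL refreshes a single previously registered source URL,
// updates its cache file if new data was downloaded, and reschedules the next
// refresh.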
func PrefetchSourceURL(xTransport *XTransport, urlToPrefetch *URLToPrefetch) error {
	in, cached, delayTillNextUpdate, err := fetchWithCache(xTransport, urlToPrefetch.url, urlToPrefetch.cacheFile)
	if err == nil && !cached {
		AtomicFileWrite(urlToPrefetch.cacheFile, []byte(in))
	}
	urlToPrefetch.when = time.Now().Add(delayTillNextUpdate)
	return err
}