2018-01-13 23:52:44 +01:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
|
|
|
"encoding/csv"
|
|
|
|
"fmt"
|
|
|
|
"io/ioutil"
|
|
|
|
"net/http"
|
|
|
|
"os"
|
|
|
|
"strings"
|
|
|
|
"time"
|
|
|
|
|
|
|
|
"github.com/dchest/safefile"
|
|
|
|
|
|
|
|
"github.com/jedisct1/dlog"
|
|
|
|
"github.com/jedisct1/go-minisign"
|
|
|
|
)
|
|
|
|
|
|
|
|
// SourceFormat identifies the wire/disk format of a source list.
type SourceFormat int
|
|
|
|
|
|
|
|
const (
	// SourceFormatV1 is the CSV-based source list format — the only
	// format currently accepted by NewSource ("v1").
	SourceFormatV1 = iota
)
|
|
|
|
|
2018-01-18 23:19:14 +01:00
|
|
|
const (
	// SourcesUpdateDelayAfterFailure is the delay before retrying a
	// source update after a failed attempt.
	SourcesUpdateDelayAfterFailure = time.Duration(1) * time.Minute
)
|
|
|
|
|
2018-01-13 23:52:44 +01:00
|
|
|
// Source describes one list of server descriptions: where it is
// downloaded from, its format, and the raw payload once loaded.
type Source struct {
	url    string       // URL the list is fetched from
	format SourceFormat // payload format (currently only SourceFormatV1)
	in     string       // raw payload, as fetched or read from cache
}
|
|
|
|
|
|
|
|
func fetchFromCache(cacheFile string) ([]byte, error) {
|
2018-01-19 23:43:45 +01:00
|
|
|
dlog.Debugf("Loading source information from cache file [%s]", cacheFile)
|
2018-01-13 23:52:44 +01:00
|
|
|
return ioutil.ReadFile(cacheFile)
|
|
|
|
}
|
|
|
|
|
2018-01-18 23:54:53 +01:00
|
|
|
func fetchWithCache(url string, cacheFile string, refreshDelay time.Duration) (in string, cached bool, fromBackup bool, delayTillNextUpdate time.Duration, err error) {
|
2018-01-13 23:52:44 +01:00
|
|
|
var bin []byte
|
2018-01-18 23:54:53 +01:00
|
|
|
cached, fromBackup, usableCache, hotCache := false, false, false, false
|
2018-01-18 23:19:14 +01:00
|
|
|
delayTillNextUpdate = refreshDelay
|
2018-01-13 23:52:44 +01:00
|
|
|
fi, err := os.Stat(cacheFile)
|
2018-01-18 23:19:14 +01:00
|
|
|
var elapsed time.Duration
|
2018-01-13 23:52:44 +01:00
|
|
|
if err == nil {
|
2018-01-18 22:23:40 +01:00
|
|
|
usableCache = true
|
2018-01-18 23:19:14 +01:00
|
|
|
dlog.Debugf("Cache file present for [%s]", url)
|
|
|
|
elapsed = time.Since(fi.ModTime())
|
2018-01-13 23:52:44 +01:00
|
|
|
if elapsed < refreshDelay && elapsed >= 0 {
|
2018-01-18 22:23:40 +01:00
|
|
|
hotCache = true
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
|
|
|
}
|
2018-01-18 22:23:40 +01:00
|
|
|
if hotCache {
|
2018-01-13 23:52:44 +01:00
|
|
|
bin, err = fetchFromCache(cacheFile)
|
2018-01-14 00:20:22 +01:00
|
|
|
if err == nil {
|
2018-01-18 23:19:14 +01:00
|
|
|
dlog.Debugf("Cache is still fresh for [%s]", url)
|
2018-01-14 00:20:22 +01:00
|
|
|
cached = true
|
2018-01-18 23:19:14 +01:00
|
|
|
delayTillNextUpdate = refreshDelay - elapsed
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
2018-01-14 00:20:22 +01:00
|
|
|
}
|
|
|
|
if !cached {
|
2018-01-13 23:52:44 +01:00
|
|
|
var resp *http.Response
|
|
|
|
dlog.Infof("Loading source information from URL [%s]", url)
|
|
|
|
resp, err = http.Get(url)
|
2018-01-18 23:54:53 +01:00
|
|
|
if err == nil && resp != nil && (resp.StatusCode < 200 || resp.StatusCode > 299) {
|
2018-01-18 23:31:14 +01:00
|
|
|
err = fmt.Errorf("Webserver returned code %d", resp.StatusCode)
|
|
|
|
}
|
2018-01-13 23:52:44 +01:00
|
|
|
if err != nil {
|
2018-01-19 23:43:45 +01:00
|
|
|
delayTillNextUpdate = time.Duration(0)
|
2018-01-13 23:52:44 +01:00
|
|
|
if usableCache {
|
2018-01-18 23:19:14 +01:00
|
|
|
dlog.Debugf("Falling back to cached version of [%s]", url)
|
2018-01-13 23:52:44 +01:00
|
|
|
bin, err = fetchFromCache(cacheFile)
|
|
|
|
}
|
|
|
|
if err != nil {
|
|
|
|
return
|
|
|
|
}
|
2018-01-18 23:54:53 +01:00
|
|
|
fromBackup = true
|
2018-01-16 00:37:04 +01:00
|
|
|
} else {
|
|
|
|
bin, err = ioutil.ReadAll(resp.Body)
|
|
|
|
resp.Body.Close()
|
2018-01-13 23:52:44 +01:00
|
|
|
if err != nil {
|
2018-01-19 23:43:45 +01:00
|
|
|
delayTillNextUpdate = time.Duration(0)
|
2018-01-16 00:37:04 +01:00
|
|
|
if usableCache {
|
|
|
|
bin, err = fetchFromCache(cacheFile)
|
|
|
|
}
|
|
|
|
if err != nil {
|
|
|
|
return
|
|
|
|
}
|
2018-01-18 23:54:53 +01:00
|
|
|
fromBackup = true
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2018-01-18 23:31:14 +01:00
|
|
|
err = nil
|
2018-01-13 23:52:44 +01:00
|
|
|
in = string(bin)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
func AtomicFileWrite(file string, data []byte) error {
|
|
|
|
return safefile.WriteFile(file, data, 0644)
|
|
|
|
}
|
|
|
|
|
2018-01-18 23:19:14 +01:00
|
|
|
// URLToPrefetch schedules a background refresh for one source URL:
// what to fetch, which cache file backs it, and when the next attempt
// is due.
type URLToPrefetch struct {
	url       string
	cacheFile string
	when      time.Time
}
|
|
|
|
|
|
|
|
func NewSource(url string, minisignKeyStr string, cacheFile string, formatStr string, refreshDelay time.Duration) (Source, []URLToPrefetch, error) {
|
2018-01-13 23:52:44 +01:00
|
|
|
source := Source{url: url}
|
|
|
|
if formatStr != "v1" {
|
2018-01-18 23:19:14 +01:00
|
|
|
return source, []URLToPrefetch{}, fmt.Errorf("Unsupported source format: [%s]", formatStr)
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
|
|
|
source.format = SourceFormatV1
|
|
|
|
minisignKey, err := minisign.NewPublicKey(minisignKeyStr)
|
|
|
|
if err != nil {
|
2018-01-18 23:19:14 +01:00
|
|
|
return source, []URLToPrefetch{}, err
|
|
|
|
}
|
|
|
|
sigURL := url + ".minisig"
|
2018-01-19 23:43:45 +01:00
|
|
|
when := time.Now()
|
2018-01-18 23:19:14 +01:00
|
|
|
urlsToPrefetch := []URLToPrefetch{
|
|
|
|
URLToPrefetch{url: url, cacheFile: cacheFile, when: when},
|
|
|
|
URLToPrefetch{url: sigURL, cacheFile: cacheFile, when: when},
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
2018-01-18 23:54:53 +01:00
|
|
|
in, cached, fromBackup, delayTillNextUpdate, err := fetchWithCache(url, cacheFile, refreshDelay)
|
2018-01-13 23:52:44 +01:00
|
|
|
if err != nil {
|
2018-01-19 00:06:18 +01:00
|
|
|
dlog.Debugf("Scheduling [%s] for prefetch", url)
|
2018-01-18 23:19:14 +01:00
|
|
|
return source, urlsToPrefetch, err
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
|
|
|
sigCacheFile := cacheFile + ".minisig"
|
2018-01-18 23:54:53 +01:00
|
|
|
sigStr, sigCached, sigFromBackup, sigDelayTillNextUpdate, err := fetchWithCache(sigURL, sigCacheFile, refreshDelay)
|
2018-01-13 23:52:44 +01:00
|
|
|
if err != nil {
|
2018-01-19 00:06:18 +01:00
|
|
|
dlog.Debugf("Scheduling [%s] for prefetch", sigURL)
|
2018-01-18 23:19:14 +01:00
|
|
|
return source, urlsToPrefetch, err
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
|
|
|
signature, err := minisign.DecodeSignature(sigStr)
|
|
|
|
if err != nil {
|
2018-01-18 23:33:30 +01:00
|
|
|
os.Remove(cacheFile)
|
|
|
|
os.Remove(sigCacheFile)
|
2018-01-18 23:19:14 +01:00
|
|
|
return source, urlsToPrefetch, err
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
|
|
|
res, err := minisignKey.Verify([]byte(in), signature)
|
2018-01-18 14:28:05 +01:00
|
|
|
if err != nil || !res {
|
2018-01-18 23:33:30 +01:00
|
|
|
os.Remove(cacheFile)
|
|
|
|
os.Remove(sigCacheFile)
|
2018-01-18 23:19:14 +01:00
|
|
|
return source, urlsToPrefetch, err
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
2018-01-19 23:43:45 +01:00
|
|
|
if !cached && !fromBackup {
|
2018-01-13 23:52:44 +01:00
|
|
|
if err = AtomicFileWrite(cacheFile, []byte(in)); err != nil {
|
2018-01-18 23:19:14 +01:00
|
|
|
return source, urlsToPrefetch, err
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
|
|
|
}
|
2018-01-19 23:43:45 +01:00
|
|
|
if !sigCached && !fromBackup {
|
2018-01-13 23:52:44 +01:00
|
|
|
if err = AtomicFileWrite(sigCacheFile, []byte(sigStr)); err != nil {
|
2018-01-18 23:19:14 +01:00
|
|
|
return source, urlsToPrefetch, err
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
dlog.Noticef("Source [%s] loaded", url)
|
|
|
|
source.in = in
|
2018-01-18 23:19:14 +01:00
|
|
|
if sigDelayTillNextUpdate < delayTillNextUpdate {
|
|
|
|
delayTillNextUpdate = sigDelayTillNextUpdate
|
|
|
|
}
|
|
|
|
when = time.Now().Add(delayTillNextUpdate)
|
2018-01-18 23:54:53 +01:00
|
|
|
if !fromBackup && !sigFromBackup {
|
2018-01-19 23:43:45 +01:00
|
|
|
for i := range urlsToPrefetch {
|
|
|
|
urlsToPrefetch[i].when = when
|
|
|
|
}
|
2018-01-18 23:54:53 +01:00
|
|
|
}
|
2018-01-18 23:19:14 +01:00
|
|
|
return source, urlsToPrefetch, nil
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
func (source *Source) Parse() ([]RegisteredServer, error) {
|
|
|
|
var registeredServers []RegisteredServer
|
|
|
|
|
|
|
|
csvReader := csv.NewReader(strings.NewReader(source.in))
|
|
|
|
records, err := csvReader.ReadAll()
|
|
|
|
if err != nil {
|
|
|
|
return registeredServers, nil
|
|
|
|
}
|
2018-01-17 02:40:47 +01:00
|
|
|
for lineNo, record := range records {
|
2018-01-13 23:53:33 +01:00
|
|
|
if len(record) == 0 {
|
|
|
|
continue
|
|
|
|
}
|
2018-01-13 23:52:44 +01:00
|
|
|
if len(record) < 14 {
|
2018-01-17 09:44:03 +01:00
|
|
|
return registeredServers, fmt.Errorf("Parse error at line %d", 1+lineNo)
|
2018-01-13 23:52:44 +01:00
|
|
|
}
|
2018-01-17 02:40:47 +01:00
|
|
|
if lineNo == 0 {
|
2018-01-13 23:52:44 +01:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
name := record[0]
|
|
|
|
serverAddrStr := record[10]
|
|
|
|
providerName := record[11]
|
|
|
|
serverPkStr := record[12]
|
2018-01-18 13:01:16 +01:00
|
|
|
props := ServerInformalProperties(0)
|
|
|
|
if strings.EqualFold(record[7], "yes") {
|
|
|
|
props |= ServerInformalPropertyDNSSEC
|
|
|
|
}
|
|
|
|
if strings.EqualFold(record[8], "yes") {
|
|
|
|
props |= ServerInformalPropertyNoLog
|
|
|
|
}
|
|
|
|
stamp, err := NewServerStampFromLegacy(serverAddrStr, serverPkStr, providerName, props)
|
2018-01-13 23:52:44 +01:00
|
|
|
if err != nil {
|
|
|
|
return registeredServers, err
|
|
|
|
}
|
|
|
|
registeredServer := RegisteredServer{
|
|
|
|
name: name, stamp: stamp,
|
|
|
|
}
|
|
|
|
registeredServers = append(registeredServers, registeredServer)
|
|
|
|
}
|
|
|
|
return registeredServers, nil
|
|
|
|
}
|
2018-01-18 23:19:14 +01:00
|
|
|
|
2018-01-19 23:43:45 +01:00
|
|
|
func PrefetchSourceURL(urlToPrefetch *URLToPrefetch) error {
|
|
|
|
_, _, fromBackup, _, err := fetchWithCache(urlToPrefetch.url, urlToPrefetch.cacheFile, time.Duration(0))
|
|
|
|
if err != nil {
|
|
|
|
dlog.Debugf("[%s]: %s", urlToPrefetch.url, err)
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
if !fromBackup {
|
|
|
|
urlToPrefetch.when = urlToPrefetch.when.Add(24 * time.Hour)
|
2018-01-18 23:19:14 +01:00
|
|
|
}
|
2018-01-19 23:43:45 +01:00
|
|
|
return nil
|
2018-01-18 23:19:14 +01:00
|
|
|
}
|