Improved error handling

Frank Denis 2018-01-20 00:30:33 +01:00
parent 7fbb4c5428
commit 1c27d6c230
2 changed files with 72 additions and 91 deletions


@@ -187,10 +187,14 @@ func (proxy *Proxy) prefetcher(urlsToPrefetch *[]URLToPrefetch) {
                 urlToPrefetch := &(*urlsToPrefetch)[i]
                 if now.After(urlToPrefetch.when) {
                     dlog.Debugf("Prefetching [%s]", urlToPrefetch.url)
-                    PrefetchSourceURL(urlToPrefetch)
+                    if err := PrefetchSourceURL(urlToPrefetch); err != nil {
+                        dlog.Debugf("Prefetching [%s] failed: %s", urlToPrefetch.url, err)
+                    } else {
+                        dlog.Debugf("Prefetching [%s] succeeded. Next refresh scheduled for %v", urlToPrefetch.url, urlToPrefetch.when)
+                    }
                 }
             }
-            time.Sleep(60 * time.Second)
+            time.Sleep(5 * time.Second)
         }
     }()
 }
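
For reference, the prefetch loop after this change reads roughly as follows. This sketch is assembled from the new side of the hunk above; the enclosing goroutine and loops are inferred from the context lines, so surrounding details may differ slightly from the actual file.

func (proxy *Proxy) prefetcher(urlsToPrefetch *[]URLToPrefetch) {
    go func() {
        for {
            now := time.Now()
            for i := range *urlsToPrefetch {
                urlToPrefetch := &(*urlsToPrefetch)[i]
                if now.After(urlToPrefetch.when) {
                    dlog.Debugf("Prefetching [%s]", urlToPrefetch.url)
                    // The result of PrefetchSourceURL is no longer discarded:
                    // failures are logged, successes report the next refresh time.
                    if err := PrefetchSourceURL(urlToPrefetch); err != nil {
                        dlog.Debugf("Prefetching [%s] failed: %s", urlToPrefetch.url, err)
                    } else {
                        dlog.Debugf("Prefetching [%s] succeeded. Next refresh scheduled for %v", urlToPrefetch.url, urlToPrefetch.when)
                    }
                }
            }
            // The polling interval drops from 60 seconds to 5 seconds.
            time.Sleep(5 * time.Second)
        }
    }()
}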


@@ -2,6 +2,7 @@ package main
 import (
     "encoding/csv"
+    "errors"
     "fmt"
     "io/ioutil"
     "net/http"
@@ -23,6 +24,7 @@ const (
 const (
     SourcesUpdateDelayAfterFailure = time.Duration(1) * time.Minute
+    SourcesUpdateDelay             = time.Duration(24) * time.Hour
 )
 
 type Source struct {
@@ -31,67 +33,59 @@ type Source struct {
     in string
 }
 
-func fetchFromCache(cacheFile string) ([]byte, error) {
-    dlog.Debugf("Loading source information from cache file [%s]", cacheFile)
-    return ioutil.ReadFile(cacheFile)
+func fetchFromCache(cacheFile string) (in string, delayTillNextUpdate time.Duration, err error) {
+    fi, err := os.Stat(cacheFile)
+    if err != nil {
+        delayTillNextUpdate = time.Duration(0)
+        return
+    }
+    elapsed := time.Since(fi.ModTime())
+    if elapsed < SourcesUpdateDelay {
+        dlog.Debugf("Cache file [%s] is still fresh", cacheFile)
+        delayTillNextUpdate = SourcesUpdateDelay - elapsed
+    } else {
+        dlog.Debugf("Cache file [%s] needs to be refreshed", cacheFile)
+        delayTillNextUpdate = time.Duration(0)
+    }
+    var bin []byte
+    bin, err = ioutil.ReadFile(cacheFile)
+    if err != nil {
+        delayTillNextUpdate = time.Duration(0)
+        return
+    }
+    in = string(bin)
+    return
 }
 
-func fetchWithCache(url string, cacheFile string, refreshDelay time.Duration) (in string, cached bool, fromBackup bool, delayTillNextUpdate time.Duration, err error) {
-    var bin []byte
-    cached, fromBackup, usableCache, hotCache := false, false, false, false
-    delayTillNextUpdate = refreshDelay
-    fi, err := os.Stat(cacheFile)
-    var elapsed time.Duration
+func fetchWithCache(url string, cacheFile string) (in string, cached bool, delayTillNextUpdate time.Duration, err error) {
+    cached = false
+    in, delayTillNextUpdate, err = fetchFromCache(cacheFile)
     if err == nil {
-        usableCache = true
-        dlog.Debugf("Cache file present for [%s]", url)
-        elapsed = time.Since(fi.ModTime())
-        if elapsed < refreshDelay && elapsed >= 0 {
-            hotCache = true
-        }
+        dlog.Debugf("Delay till next update: %v", delayTillNextUpdate)
+        cached = true
+        return
     }
-    if hotCache {
-        bin, err = fetchFromCache(cacheFile)
-        if err == nil {
-            dlog.Debugf("Cache is still fresh for [%s]", url)
-            cached = true
-            delayTillNextUpdate = refreshDelay - elapsed
-        }
+    var resp *http.Response
+    dlog.Infof("Loading source information from URL [%s]", url)
+    resp, err = http.Get(url)
+    if err == nil && resp != nil && (resp.StatusCode < 200 || resp.StatusCode > 299) {
+        err = fmt.Errorf("Webserver returned code %d", resp.StatusCode)
+        return
+    } else if err != nil {
+        return
+    } else if resp == nil {
+        err = errors.New("Webserver returned an error")
+        return
     }
-    if !cached {
-        var resp *http.Response
-        dlog.Infof("Loading source information from URL [%s]", url)
-        resp, err = http.Get(url)
-        if err == nil && resp != nil && (resp.StatusCode < 200 || resp.StatusCode > 299) {
-            err = fmt.Errorf("Webserver returned code %d", resp.StatusCode)
-        }
-        if err != nil {
-            delayTillNextUpdate = time.Duration(0)
-            if usableCache {
-                dlog.Debugf("Falling back to cached version of [%s]", url)
-                bin, err = fetchFromCache(cacheFile)
-            }
-            if err != nil {
-                return
-            }
-            fromBackup = true
-        } else {
-            bin, err = ioutil.ReadAll(resp.Body)
-            resp.Body.Close()
-            if err != nil {
-                delayTillNextUpdate = time.Duration(0)
-                if usableCache {
-                    bin, err = fetchFromCache(cacheFile)
-                }
-                if err != nil {
-                    return
-                }
-                fromBackup = true
-            }
-        }
+    var bin []byte
+    bin, err = ioutil.ReadAll(resp.Body)
+    resp.Body.Close()
+    if err != nil {
+        return
     }
     err = nil
     in = string(bin)
+    delayTillNextUpdate = SourcesUpdateDelay
     return
 }
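
The hunk above interleaves removals and additions, so here are the two rewritten functions assembled from the added lines alone, as a reading aid. Everything shown comes from this diff (os, ioutil, http, errors, dlog and SourcesUpdateDelay are as declared above); only the comments are new.

// fetchFromCache reads a cached source and reports how long to wait before
// the next refresh, based on the cache file's modification time.
func fetchFromCache(cacheFile string) (in string, delayTillNextUpdate time.Duration, err error) {
    fi, err := os.Stat(cacheFile)
    if err != nil {
        // No usable cache file: the caller should refresh immediately.
        delayTillNextUpdate = time.Duration(0)
        return
    }
    elapsed := time.Since(fi.ModTime())
    if elapsed < SourcesUpdateDelay {
        dlog.Debugf("Cache file [%s] is still fresh", cacheFile)
        delayTillNextUpdate = SourcesUpdateDelay - elapsed
    } else {
        dlog.Debugf("Cache file [%s] needs to be refreshed", cacheFile)
        delayTillNextUpdate = time.Duration(0)
    }
    var bin []byte
    bin, err = ioutil.ReadFile(cacheFile)
    if err != nil {
        delayTillNextUpdate = time.Duration(0)
        return
    }
    in = string(bin)
    return
}

// fetchWithCache returns the cached copy when fetchFromCache succeeds, and
// otherwise downloads the source over HTTP, reporting SourcesUpdateDelay as
// the delay until the next update. The refreshDelay parameter and fromBackup
// result of the previous version are gone.
func fetchWithCache(url string, cacheFile string) (in string, cached bool, delayTillNextUpdate time.Duration, err error) {
    cached = false
    in, delayTillNextUpdate, err = fetchFromCache(cacheFile)
    if err == nil {
        dlog.Debugf("Delay till next update: %v", delayTillNextUpdate)
        cached = true
        return
    }
    var resp *http.Response
    dlog.Infof("Loading source information from URL [%s]", url)
    resp, err = http.Get(url)
    if err == nil && resp != nil && (resp.StatusCode < 200 || resp.StatusCode > 299) {
        err = fmt.Errorf("Webserver returned code %d", resp.StatusCode)
        return
    } else if err != nil {
        return
    } else if resp == nil {
        err = errors.New("Webserver returned an error")
        return
    }
    var bin []byte
    bin, err = ioutil.ReadAll(resp.Body)
    resp.Body.Close()
    if err != nil {
        return
    }
    err = nil
    in = string(bin)
    delayTillNextUpdate = SourcesUpdateDelay
    return
}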
@@ -106,6 +100,7 @@ type URLToPrefetch struct {
 }
 
 func NewSource(url string, minisignKeyStr string, cacheFile string, formatStr string, refreshDelay time.Duration) (Source, []URLToPrefetch, error) {
+    _ = refreshDelay
     source := Source{url: url}
     if formatStr != "v1" {
         return source, []URLToPrefetch{}, fmt.Errorf("Unsupported source format: [%s]", formatStr)
@@ -115,23 +110,17 @@ func NewSource(url string, minisignKeyStr string, cacheFile string, formatStr st
     if err != nil {
         return source, []URLToPrefetch{}, err
     }
+    now := time.Now()
+    urlsToPrefetch := []URLToPrefetch{}
     sigURL := url + ".minisig"
-    when := time.Now()
-    urlsToPrefetch := []URLToPrefetch{
-        URLToPrefetch{url: url, cacheFile: cacheFile, when: when},
-        URLToPrefetch{url: sigURL, cacheFile: cacheFile, when: when},
-    }
-    in, cached, fromBackup, delayTillNextUpdate, err := fetchWithCache(url, cacheFile, refreshDelay)
-    if err != nil {
-        dlog.Debugf("Scheduling [%s] for prefetch", url)
-        return source, urlsToPrefetch, err
-    }
+    in, cached, delayTillNextUpdate, err := fetchWithCache(url, cacheFile)
+    urlsToPrefetch = append(urlsToPrefetch, URLToPrefetch{url: url, cacheFile: cacheFile, when: now.Add(delayTillNextUpdate)})
     sigCacheFile := cacheFile + ".minisig"
-    sigStr, sigCached, sigFromBackup, sigDelayTillNextUpdate, err := fetchWithCache(sigURL, sigCacheFile, refreshDelay)
-    if err != nil {
-        dlog.Debugf("Scheduling [%s] for prefetch", sigURL)
-        return source, urlsToPrefetch, err
-    }
+    sigStr, sigCached, sigDelayTillNextUpdate, err := fetchWithCache(sigURL, sigCacheFile)
+    urlsToPrefetch = append(urlsToPrefetch, URLToPrefetch{url: sigURL, cacheFile: sigCacheFile, when: now.Add(sigDelayTillNextUpdate)})
     signature, err := minisign.DecodeSignature(sigStr)
     if err != nil {
         os.Remove(cacheFile)
@@ -144,27 +133,18 @@ func NewSource(url string, minisignKeyStr string, cacheFile string, formatStr st
         os.Remove(sigCacheFile)
         return source, urlsToPrefetch, err
     }
-    if !cached && !fromBackup {
+    if !cached {
         if err = AtomicFileWrite(cacheFile, []byte(in)); err != nil {
-            return source, urlsToPrefetch, err
+            dlog.Warnf("%s: %s", cacheFile, err)
         }
     }
-    if !sigCached && !fromBackup {
+    if !sigCached {
         if err = AtomicFileWrite(sigCacheFile, []byte(sigStr)); err != nil {
-            return source, urlsToPrefetch, err
+            dlog.Warnf("%s: %s", sigCacheFile, err)
        }
     }
     dlog.Noticef("Source [%s] loaded", url)
     source.in = in
-    if sigDelayTillNextUpdate < delayTillNextUpdate {
-        delayTillNextUpdate = sigDelayTillNextUpdate
-    }
-    when = time.Now().Add(delayTillNextUpdate)
-    if !fromBackup && !sigFromBackup {
-        for i := range urlsToPrefetch {
-            urlsToPrefetch[i].when = when
-        }
-    }
     return source, urlsToPrefetch, nil
 }
@@ -210,13 +190,10 @@ func (source *Source) Parse() ([]RegisteredServer, error) {
 }
 
 func PrefetchSourceURL(urlToPrefetch *URLToPrefetch) error {
-    _, _, fromBackup, _, err := fetchWithCache(urlToPrefetch.url, urlToPrefetch.cacheFile, time.Duration(0))
-    if err != nil {
-        dlog.Debugf("[%s]: %s", urlToPrefetch.url, err)
-        return err
+    in, _, delayTillNextUpdate, err := fetchWithCache(urlToPrefetch.url, urlToPrefetch.cacheFile)
+    if err == nil {
+        AtomicFileWrite(urlToPrefetch.cacheFile, []byte(in))
     }
-    if !fromBackup {
-        urlToPrefetch.when = urlToPrefetch.when.Add(24 * time.Hour)
-    }
-    return nil
+    urlToPrefetch.when = time.Now().Add(delayTillNextUpdate)
+    return err
 }
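
Assembled the same way from the final hunk, the prefetch helper now always reschedules the entry and hands any fetch error back to the caller, which is what the updated prefetcher loop in the first file checks and logs:

// PrefetchSourceURL refreshes a single cached source. On success the payload
// is written back to the cache file; in all cases the next attempt is
// scheduled delayTillNextUpdate from now, and the fetch error is returned.
func PrefetchSourceURL(urlToPrefetch *URLToPrefetch) error {
    in, _, delayTillNextUpdate, err := fetchWithCache(urlToPrefetch.url, urlToPrefetch.cacheFile)
    if err == nil {
        AtomicFileWrite(urlToPrefetch.cacheFile, []byte(in))
    }
    urlToPrefetch.when = time.Now().Add(delayTillNextUpdate)
    return err
}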