Refactor saving downloads to cache
Moved the cache write to happen immediately after the download, which removes the duplicated write code in callers and reduces the number of return values from the download function.
parent 4c156784c8
commit ad92be5b9c
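Before this change, fetchWithCache returned a cached flag and every caller (NewSource, PrefetchSourceURL) wrote the cache file itself with AtomicFileWrite; now the write happens once, inside fetchWithCache, right after a successful download. Below is a minimal, self-contained sketch of that pattern using only the standard library: the function shape matches the diff that follows, but os.WriteFile, the placeholder delay constant and the URL/paths in main stand in for the project's AtomicFileWrite, MinSourcesUpdateDelay, dlog and XTransport. The actual change is in the diff.

// Sketch only: stand-ins for the project's MinSourcesUpdateDelay, AtomicFileWrite,
// dlog and XTransport; see the diff below for the real change.
package main

import (
    "fmt"
    "io"
    "net/http"
    "os"
    "time"
)

const minSourcesUpdateDelay = 24 * time.Hour // placeholder value

// fetchWithCache mirrors the refactored shape: three return values instead of
// four, and the cache file is written here, immediately after the download.
func fetchWithCache(urlStr, cacheFile string, refreshDelay time.Duration) (in string, delayTillNextUpdate time.Duration, err error) {
    // Serve a fresh-enough cache file without touching the network.
    if st, statErr := os.Stat(cacheFile); statErr == nil && time.Since(st.ModTime()) < refreshDelay {
        if bin, readErr := os.ReadFile(cacheFile); readErr == nil {
            return string(bin), refreshDelay - time.Since(st.ModTime()), nil
        }
    }
    delayTillNextUpdate = minSourcesUpdateDelay
    resp, err := http.Get(urlStr)
    if err != nil {
        return
    }
    defer resp.Body.Close()
    bin, err := io.ReadAll(resp.Body)
    if err != nil {
        return
    }
    // Write right after the download; a failed write is logged and the error
    // still reaches the caller through the named return value.
    if err = os.WriteFile(cacheFile, bin, 0o644); err != nil {
        fmt.Fprintf(os.Stderr, "%s: %s\n", cacheFile, err)
    }
    in = string(bin)
    return
}

func main() {
    // Hypothetical URL and cache path, for illustration only.
    in, delay, err := fetchWithCache("https://example.com/list.md", "list.md.cache", time.Hour)
    if err != nil {
        fmt.Fprintln(os.Stderr, err)
        return
    }
    fmt.Printf("fetched %d bytes, next update in %v\n", len(in), delay)
}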
@@ -84,18 +84,13 @@ func fetchFromURL(xTransport *XTransport, u *url.URL) (bin []byte, err error) {
     return
 }
 
-func fetchWithCache(xTransport *XTransport, urlStr string, cacheFile string, refreshDelay time.Duration) (in string, cached bool, delayTillNextUpdate time.Duration, err error) {
-    cached = false
+func fetchWithCache(xTransport *XTransport, urlStr string, cacheFile string, refreshDelay time.Duration) (in string, delayTillNextUpdate time.Duration, err error) {
     expired := false
     in, expired, delayTillNextUpdate, err = fetchFromCache(cacheFile, refreshDelay)
     if err == nil && !expired {
         dlog.Debugf("Delay till next update: %v", delayTillNextUpdate)
-        cached = true
         return
     }
-    if expired {
-        cached = true
-    }
     if len(urlStr) == 0 {
         if !expired {
             err = fmt.Errorf("Cache file [%s] not present and no URL given to retrieve it", cacheFile)
@@ -113,7 +108,11 @@ func fetchWithCache(xTransport *XTransport, urlStr string, cacheFile string, ref
     if bin, err = fetchFromURL(xTransport, u); err != nil {
         return
     }
-    cached = false
+    if err = AtomicFileWrite(cacheFile, bin); err != nil {
+        if absPath, err2 := filepath.Abs(cacheFile); err2 == nil {
+            dlog.Warnf("%s: %s", absPath, err)
+        }
+    }
     in = string(bin)
     delayTillNextUpdate = MinSourcesUpdateDelay
     return
@@ -146,19 +145,18 @@ func NewSource(xTransport *XTransport, urls []string, minisignKeyStr string, cac
     sigCacheFile := cacheFile + ".minisig"
 
     var sigStr, in string
-    var cached, sigCached bool
     var delayTillNextUpdate, sigDelayTillNextUpdate time.Duration
     var err, sigErr error
     var preloadURL string
     if len(urls) <= 0 {
-        in, cached, delayTillNextUpdate, err = fetchWithCache(xTransport, "", cacheFile, refreshDelay)
-        sigStr, sigCached, sigDelayTillNextUpdate, sigErr = fetchWithCache(xTransport, "", sigCacheFile, refreshDelay)
+        in, delayTillNextUpdate, err = fetchWithCache(xTransport, "", cacheFile, refreshDelay)
+        sigStr, sigDelayTillNextUpdate, sigErr = fetchWithCache(xTransport, "", sigCacheFile, refreshDelay)
     } else {
         preloadURL = urls[0]
         for _, url := range urls {
             sigURL := url + ".minisig"
-            in, cached, delayTillNextUpdate, err = fetchWithCache(xTransport, url, cacheFile, refreshDelay)
-            sigStr, sigCached, sigDelayTillNextUpdate, sigErr = fetchWithCache(xTransport, sigURL, sigCacheFile, refreshDelay)
+            in, delayTillNextUpdate, err = fetchWithCache(xTransport, url, cacheFile, refreshDelay)
+            sigStr, sigDelayTillNextUpdate, sigErr = fetchWithCache(xTransport, sigURL, sigCacheFile, refreshDelay)
             if err == nil && sigErr == nil {
                 preloadURL = url
                 break
@@ -182,20 +180,6 @@ func NewSource(xTransport *XTransport, urls []string, minisignKeyStr string, cac
     if err = source.checkSignature(in, sigStr); err != nil {
         return source, urlsToPrefetch, err
     }
-    if !cached {
-        if err = AtomicFileWrite(cacheFile, []byte(in)); err != nil {
-            if absPath, err2 := filepath.Abs(cacheFile); err2 == nil {
-                dlog.Warnf("%s: %s", absPath, err)
-            }
-        }
-    }
-    if !sigCached {
-        if err = AtomicFileWrite(sigCacheFile, []byte(sigStr)); err != nil {
-            if absPath, err2 := filepath.Abs(sigCacheFile); err2 == nil {
-                dlog.Warnf("%s: %s", absPath, err)
-            }
-        }
-    }
     dlog.Noticef("Source [%s] loaded", cacheFile)
     source.in = in
     return source, urlsToPrefetch, nil
@@ -276,10 +260,7 @@ PartsLoop:
 }
 
 func PrefetchSourceURL(xTransport *XTransport, urlToPrefetch *URLToPrefetch) error {
-    in, cached, delayTillNextUpdate, err := fetchWithCache(xTransport, urlToPrefetch.url, urlToPrefetch.cacheFile, MinSourcesUpdateDelay)
-    if err == nil && !cached {
-        AtomicFileWrite(urlToPrefetch.cacheFile, []byte(in))
-    }
+    _, delayTillNextUpdate, err := fetchWithCache(xTransport, urlToPrefetch.url, urlToPrefetch.cacheFile, MinSourcesUpdateDelay)
     urlToPrefetch.when = timeNow().Add(delayTillNextUpdate)
     return err
 }