fix: CrawlAndGetDataUsage close pipe() before using a new one (#11600)

also make sure that errors during deserialization close
the reader with the right error type, so that the Write() end
actually sees the final error; this avoids WaitGroup usage
and waiting.
This commit is contained in:
Harshavardhana
2021-02-22 10:04:32 -08:00
committed by GitHub
parent 8778828a03
commit c31d2c3fdc
3 changed files with 8 additions and 12 deletions

View File

@@ -420,9 +420,10 @@ func (er erasureObjects) crawlAndGetDataUsage(ctx context.Context, buckets []Buc
cache, err = disk.CrawlAndGetDataUsage(ctx, cache)
cache.Info.BloomFilter = nil
if err != nil {
logger.LogIf(ctx, err)
if cache.Info.LastUpdate.After(before) {
if !cache.Info.LastUpdate.IsZero() && cache.Info.LastUpdate.After(before) {
logger.LogIf(ctx, cache.save(ctx, er, cacheName))
} else {
logger.LogIf(ctx, err)
}
continue
}