continuous healing based on crawler (#10103)

Design: https://gist.github.com/klauspost/792fe25c315caf1dd15c8e79df124914
Klaus Post authored on 2020-08-24 13:47:01 -07:00, committed by GitHub
parent caad314faa
commit c097ce9c32
13 changed files with 205 additions and 136 deletions


@@ -26,7 +26,6 @@ import (
"github.com/minio/minio/cmd/logger"
"github.com/minio/minio/pkg/bpool"
"github.com/minio/minio/pkg/color"
"github.com/minio/minio/pkg/dsync"
"github.com/minio/minio/pkg/madmin"
"github.com/minio/minio/pkg/sync/errgroup"
@@ -294,20 +293,12 @@ func (er erasureObjects) crawlAndGetDataUsage(ctx context.Context, buckets []Buc
 		}
 	}
-	// Add existing buckets if changes or lifecycles.
+	// Add existing buckets.
 	for _, b := range buckets {
 		e := oldCache.find(b.Name)
 		if e != nil {
 			cache.replace(b.Name, dataUsageRoot, *e)
-			lc, err := globalLifecycleSys.Get(b.Name)
-			activeLC := err == nil && lc.HasActiveRules("", true)
-			if activeLC || bf == nil || bf.containsDir(b.Name) {
-				bucketCh <- b
-			} else {
-				if intDataUpdateTracker.debug {
-					logger.Info(color.Green("crawlAndGetDataUsage:")+" Skipping bucket %v, not updated", b.Name)
-				}
-			}
+			bucketCh <- b
 		}
 	}
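
For readers skimming the hunk above: before this commit, a bucket with an existing cache entry was only queued for crawling when it had active lifecycle rules or appeared in the bloom filter of recently updated paths; with healing now riding on the crawler, every existing bucket is queued unconditionally. Below is a minimal, hypothetical Go sketch of that new behavior; BucketInfo, hasCacheEntry, and queueExistingBuckets are simplified stand-ins for illustration, not MinIO APIs.

package main

import "fmt"

// BucketInfo is a simplified stand-in for the bucket metadata handed to the
// crawler; only the name matters for this sketch.
type BucketInfo struct {
	Name string
}

// queueExistingBuckets mirrors the post-commit loop: every bucket that still
// has an entry in the old usage cache is sent to bucketCh, with no lifecycle
// or bloom-filter gate, so the crawler (and the healing that piggybacks on
// it) eventually visits every bucket.
func queueExistingBuckets(buckets []BucketInfo, hasCacheEntry func(name string) bool, bucketCh chan<- BucketInfo) {
	for _, b := range buckets {
		if hasCacheEntry(b.Name) {
			bucketCh <- b
		}
	}
}

func main() {
	buckets := []BucketInfo{{Name: "images"}, {Name: "logs"}}
	bucketCh := make(chan BucketInfo, len(buckets))

	// Assume every bucket already has a cache entry from the previous cycle.
	queueExistingBuckets(buckets, func(string) bool { return true }, bucketCh)
	close(bucketCh)

	for b := range bucketCh {
		fmt.Println("queued for crawl:", b.Name)
	}
}

Presumably the lifecycle/bloom-filter gate was dropped because skipping "unchanged" buckets would also skip healing them, which would defeat crawler-driven continuous healing.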