Reduce redundant crawler logging (#11448)

Ritesh H Shukla 2021-02-05 15:51:11 -08:00 committed by GitHub
parent 99b733d44c
commit 5fe4bb6b36

@@ -276,7 +276,6 @@ func (er erasureObjects) getOnlineDisksWithHealing() (newDisks []StorageAPI, hea
 // Updates are sent on a regular basis and the caller *must* consume them.
 func (er erasureObjects) crawlAndGetDataUsage(ctx context.Context, buckets []BucketInfo, bf *bloomFilter, updates chan<- dataUsageCache) error {
 	if len(buckets) == 0 {
-		logger.Info(color.Green("data-crawl:") + " No buckets found, skipping crawl")
 		return nil
 	}
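
For readers who want the change in isolation, here is a minimal, self-contained sketch of the early-return path after this commit. The type definitions below are simplified stand-ins rather than the real MinIO declarations; only the method signature and the len(buckets) == 0 branch come from the diff above.

package main

import (
	"context"
	"fmt"
)

// Simplified stand-ins for the MinIO types referenced in the diff; the real
// definitions live in the minio/minio repository.
type BucketInfo struct{ Name string }
type bloomFilter struct{}
type dataUsageCache struct{}
type erasureObjects struct{}

// crawlAndGetDataUsage mirrors the post-change control flow shown in the diff:
// with no buckets there is nothing to crawl, so the function returns
// immediately instead of logging an informational message on every cycle.
func (er erasureObjects) crawlAndGetDataUsage(ctx context.Context, buckets []BucketInfo, bf *bloomFilter, updates chan<- dataUsageCache) error {
	if len(buckets) == 0 {
		return nil
	}
	// ... the actual crawl and data-usage accounting continues here in the real code ...
	return nil
}

func main() {
	var er erasureObjects
	updates := make(chan dataUsageCache)
	err := er.crawlAndGetDataUsage(context.Background(), nil, nil, updates)
	fmt.Println("crawl with no buckets returned:", err)
}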