fix: use HealObject for cleaning up dangling objects (#11171)

The main reason is that HealObjects() starts a recursive listing
for each object, which can take a very long time on large
namespaces. Instead of the recursive listing, just perform
HealObject() at the prefix.

Deletes already handle purging dangling content; we don't need
to achieve this by doing a recursive listing, which in turn can
delay crawling significantly.
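
As a rough illustration of the change in the diff below (a minimal sketch: the ObjectLayer interface and the oldCleanup/newCleanup helpers here are simplified stand-ins, not MinIO's actual definitions), the old path recursively lists every object under the prefix and queues a heal per object, while the new path issues a single HealObject() call at the prefix and relies on delete handling to purge dangling content.

package example

import "context"

// HealOpts mirrors the options used in the diff below; other fields are omitted.
type HealOpts struct {
	Recursive bool
	Remove    bool
}

// ObjectLayer is a simplified, hypothetical stand-in for the object API in the diff.
type ObjectLayer interface {
	// HealObjects recursively lists every object under prefix and calls fn per object.
	HealObjects(ctx context.Context, bucket, prefix string, opts HealOpts,
		fn func(bucket, object, versionID string) error) error
	// HealObject heals a single entry; here it is issued at the dangling prefix itself.
	HealObject(ctx context.Context, bucket, object, versionID string, opts HealOpts) error
}

// oldCleanup: recursive listing, one heal per object (slow on large namespaces).
func oldCleanup(ctx context.Context, objAPI ObjectLayer, bucket, prefix string) error {
	return objAPI.HealObjects(ctx, bucket, prefix, HealOpts{Recursive: true, Remove: true},
		func(bucket, object, versionID string) error {
			return nil // a heal task per object would be queued here
		})
}

// newCleanup: a single heal at the prefix, no recursive listing.
func newCleanup(ctx context.Context, objAPI ObjectLayer, bucket, prefix string) error {
	return objAPI.HealObject(ctx, bucket, prefix, "", HealOpts{Recursive: true, Remove: true})
}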
Harshavardhana, 2020-12-27 15:42:20 -08:00 (committed by GitHub)
parent c19e6ce773
commit d8d25a308f


@@ -611,18 +611,9 @@ func (f *folderScanner) scanQueuedLevels(ctx context.Context, folders []cachedFo
if f.dataUsageCrawlDebug {
logger.Info(color.Green("healObjects:")+" deleting dangling directory %s", prefix)
}
// If we have quorum, found directories, but no objects, issue heal to delete the dangling.
-objAPI.HealObjects(ctx, bucket, prefix, madmin.HealOpts{Recursive: true, Remove: true},
-	func(bucket, object, versionID string) error {
-		// Wait for each heal as per crawler frequency.
-		wait()
-		wait = crawlerSleeper.Timer(ctx)
-		return bgSeq.queueHealTask(healSource{
-			bucket: bucket,
-			object: object,
-			versionID: versionID,
-		}, madmin.HealItemObject)
-	})
+objAPI.HealObject(ctx, bucket, prefix, "", madmin.HealOpts{Recursive: true, Remove: true})
}
wait()