dataDir needs maxima calculation to be correct (#13715)

There is a corner case where the new check doesn't work when dataDir has changed, especially when xl.json -> xl.meta healing happens; if some healing is partial, this can make certain backend files unreadable.

This PR fixes the issue and updates the unit tests.
This commit is contained in:
Harshavardhana
2021-11-20 11:26:30 -08:00
committed by GitHub
parent 1e72e9b1cd
commit 36b5426f6e
2 changed files with 49 additions and 18 deletions

View File

@@ -30,12 +30,14 @@ func commonTime(modTimes []time.Time, dataDirs []string) (modTime time.Time, dat
var maxima int // Counter for remembering max occurrence of elements.
timeOccurenceMap := make(map[int64]int, len(modTimes))
dataDirMap := make(map[int64]string, len(modTimes))
// Ignore the uuid sentinel and count the rest.
for _, time := range modTimes {
if time.Equal(timeSentinel) {
for i, t := range modTimes {
if t.Equal(timeSentinel) {
continue
}
timeOccurenceMap[time.UnixNano()]++
dataDirMap[t.UnixNano()] = dataDirs[i]
timeOccurenceMap[t.UnixNano()]++
}
// Find the common cardinality from previously collected
@@ -44,18 +46,11 @@ func commonTime(modTimes []time.Time, dataDirs []string) (modTime time.Time, dat
t := time.Unix(0, nano).UTC()
if count > maxima || (count == maxima && t.After(modTime)) {
maxima = count
dataDir = dataDirMap[nano]
modTime = t
}
}
for i, ddataDir := range dataDirs {
if modTimes[i].Equal(modTime) {
// Return the data-dir that matches modTime.
dataDir = ddataDir
break
}
}
// Return the collected common uuid.
return modTime, dataDir
}