preserve context per request for local locks (#9828)

The current bug re-used the context from previously
granted lockers, which caused existing, valid read
or write locks to time out prematurely.
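
As a minimal, self-contained sketch of the failure mode (hypothetical types,
not the actual lsync/dsync code): a locker that captures a context at
construction time judges every later lock request against the first caller's
deadline, so a fresh request can fail as soon as that original context
expires; a locker that accepts the context per request evaluates each
caller's deadline independently.

package main

import (
	"context"
	"fmt"
	"time"
)

// staleCtxLocker captures a context at construction time (the buggy
// pattern): every later Lock call is judged against the first caller's
// deadline, so new requests can time out prematurely.
type staleCtxLocker struct {
	ctx context.Context
}

func (l *staleCtxLocker) Lock() error {
	select {
	case <-l.ctx.Done():
		return l.ctx.Err() // fails because of someone else's expired context
	case <-time.After(10 * time.Millisecond): // pretend the lock was granted
		return nil
	}
}

// perRequestLocker takes the context per request (the fixed pattern):
// each caller's deadline is evaluated independently.
type perRequestLocker struct{}

func (l *perRequestLocker) Lock(ctx context.Context) error {
	select {
	case <-ctx.Done():
		return ctx.Err()
	case <-time.After(10 * time.Millisecond): // pretend the lock was granted
		return nil
	}
}

func main() {
	// The first caller's context has long since expired.
	expired, cancel := context.WithTimeout(context.Background(), time.Nanosecond)
	defer cancel()
	time.Sleep(time.Millisecond)

	bad := &staleCtxLocker{ctx: expired}
	fmt.Println("stale-context locker:", bad.Lock()) // context deadline exceeded

	good := &perRequestLocker{}
	fmt.Println("per-request locker:", good.Lock(context.Background())) // <nil>
}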

This bug affects only local lockers in FS or
standalone erasure-coded mode. The issue is also
largely historical: it has been present in lsync
for some time, but we were lucky not to hit it.

Similar changes are made in dsync as well to keep
the code in the two packages consistent.
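
The visible effect on the dsync tests is the call-site change sketched below;
ds, id, and source are fixtures defined elsewhere in the test file, so this
fragment is for orientation rather than a standalone program:

	// Previously the context was bound at construction time:
	//   dm := NewDRWMutex(context.Background(), ds, "test")
	// Now the mutex is constructed without a context and, per this
	// commit's intent, a caller's context travels with each lock
	// request instead of being captured here.
	dm := NewDRWMutex(ds, "test")
	dm.Lock(id, source)
	dm.Unlock()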

Fixes #9827
Author: Harshavardhana
Date: 2020-06-14 07:43:10 -07:00
Committed by: GitHub
Parent: 535efd34a0
Commit: d55f4336ae
6 changed files with 103 additions and 111 deletions

@@ -19,7 +19,6 @@
 package dsync_test

 import (
-	"context"
 	"fmt"
 	"log"
 	"math/rand"
@@ -89,7 +88,7 @@ func TestMain(m *testing.M) {
 func TestSimpleLock(t *testing.T) {
-	dm := NewDRWMutex(context.Background(), ds, "test")
+	dm := NewDRWMutex(ds, "test")
 	dm.Lock(id, source)
@@ -101,7 +100,7 @@ func TestSimpleLock(t *testing.T) {
 func TestSimpleLockUnlockMultipleTimes(t *testing.T) {
-	dm := NewDRWMutex(context.Background(), ds, "test")
+	dm := NewDRWMutex(ds, "test")
 	dm.Lock(id, source)
 	time.Sleep(time.Duration(10+(rand.Float32()*50)) * time.Millisecond)
@@ -127,8 +126,8 @@ func TestSimpleLockUnlockMultipleTimes(t *testing.T) {
 // Test two locks for same resource, one succeeds, one fails (after timeout)
 func TestTwoSimultaneousLocksForSameResource(t *testing.T) {
-	dm1st := NewDRWMutex(context.Background(), ds, "aap")
-	dm2nd := NewDRWMutex(context.Background(), ds, "aap")
+	dm1st := NewDRWMutex(ds, "aap")
+	dm2nd := NewDRWMutex(ds, "aap")
 	dm1st.Lock(id, source)
@@ -151,9 +150,9 @@ func TestTwoSimultaneousLocksForSameResource(t *testing.T) {
 // Test three locks for same resource, one succeeds, one fails (after timeout)
 func TestThreeSimultaneousLocksForSameResource(t *testing.T) {
-	dm1st := NewDRWMutex(context.Background(), ds, "aap")
-	dm2nd := NewDRWMutex(context.Background(), ds, "aap")
-	dm3rd := NewDRWMutex(context.Background(), ds, "aap")
+	dm1st := NewDRWMutex(ds, "aap")
+	dm2nd := NewDRWMutex(ds, "aap")
+	dm3rd := NewDRWMutex(ds, "aap")
 	dm1st.Lock(id, source)
@@ -216,8 +215,8 @@ func TestThreeSimultaneousLocksForSameResource(t *testing.T) {
 // Test two locks for different resources, both succeed
 func TestTwoSimultaneousLocksForDifferentResources(t *testing.T) {
-	dm1 := NewDRWMutex(context.Background(), ds, "aap")
-	dm2 := NewDRWMutex(context.Background(), ds, "noot")
+	dm1 := NewDRWMutex(ds, "aap")
+	dm2 := NewDRWMutex(ds, "noot")
 	dm1.Lock(id, source)
 	dm2.Lock(id, source)
@@ -247,7 +246,7 @@ func TestMutex(t *testing.T) {
 		loops = 5
 	}
 	c := make(chan bool)
-	m := NewDRWMutex(context.Background(), ds, "test")
+	m := NewDRWMutex(ds, "test")
 	for i := 0; i < 10; i++ {
 		go HammerMutex(m, loops, c)
 	}
@@ -261,7 +260,7 @@ func BenchmarkMutexUncontended(b *testing.B) {
 		*DRWMutex
 	}
 	b.RunParallel(func(pb *testing.PB) {
-		var mu = PaddedMutex{NewDRWMutex(context.Background(), ds, "")}
+		var mu = PaddedMutex{NewDRWMutex(ds, "")}
 		for pb.Next() {
 			mu.Lock(id, source)
 			mu.Unlock()
@@ -270,7 +269,7 @@
 }

 func benchmarkMutex(b *testing.B, slack, work bool) {
-	mu := NewDRWMutex(context.Background(), ds, "")
+	mu := NewDRWMutex(ds, "")
 	if slack {
 		b.SetParallelism(10)
 	}
@@ -313,7 +312,7 @@ func BenchmarkMutexNoSpin(b *testing.B) {
 	// These goroutines yield during local work, so that switching from
 	// a blocked goroutine to other goroutines is profitable.
 	// As a matter of fact, this benchmark still triggers some spinning in the mutex.
-	m := NewDRWMutex(context.Background(), ds, "")
+	m := NewDRWMutex(ds, "")
 	var acc0, acc1 uint64
 	b.SetParallelism(4)
 	b.RunParallel(func(pb *testing.PB) {
@@ -345,7 +344,7 @@ func BenchmarkMutexSpin(b *testing.B) {
 	// profitable. To achieve this we create a goroutine per-proc.
 	// These goroutines access considerable amount of local data so that
 	// unnecessary rescheduling is penalized by cache misses.
-	m := NewDRWMutex(context.Background(), ds, "")
+	m := NewDRWMutex(ds, "")
 	var acc0, acc1 uint64
 	b.RunParallel(func(pb *testing.PB) {
 		var data [16 << 10]uint64