Merge new changes from minio-io/donut; minio-io/objectdriver

Harshavardhana 2015-04-07 00:01:19 -07:00
parent 35b8bb368d
commit b63c7fd84f
8 changed files with 49 additions and 23 deletions

Godeps/Godeps.json (generated)

@@ -28,7 +28,7 @@
 		},
 		{
 			"ImportPath": "github.com/minio-io/donut",
-			"Rev": "2841ca4edd6a31fce7184027e85d2bee673b49f0"
+			"Rev": "1adb050ccbc6b56caa8b29502adf43592d16adbd"
 		},
 		{
 			"ImportPath": "github.com/minio-io/erasure",
@@ -40,7 +40,7 @@
 		},
 		{
 			"ImportPath": "github.com/minio-io/objectdriver",
-			"Rev": "8173914f6082348048d175c7d08bff03adde2111"
+			"Rev": "144846812e29de66814a67b38c949f9875e0ee46"
 		},
 		{
 			"ImportPath": "github.com/stretchr/objx",

View File

@@ -210,14 +210,33 @@ func (s *MySuite) TestMultipleNewObjects(c *C) {
 	_, err = io.CopyN(&readerBuffer2, obj2, size)
 	c.Assert(err, IsNil)
 	c.Assert(readerBuffer2.Bytes(), DeepEquals, []byte("two"))
 	// test list objects
-	listObjects, _, isTruncated, err := donut.ListObjects("foo", "o", "", "", 1)
+	listObjects, prefixes, isTruncated, err := donut.ListObjects("foo", "o", "", "1", 1)
 	c.Assert(err, IsNil)
-	c.Assert(isTruncated, Equals, true)
-	c.Assert(listObjects, DeepEquals, []string{"obj1"})
+	c.Assert(isTruncated, Equals, false)
+	c.Assert(prefixes[0], DeepEquals, "obj1")
 	listObjects, _, isTruncated, err = donut.ListObjects("foo", "o", "", "", 10)
 	c.Assert(err, IsNil)
 	c.Assert(isTruncated, Equals, false)
 	c.Assert(listObjects, DeepEquals, []string{"obj1", "obj2"})
+	three := ioutil.NopCloser(bytes.NewReader([]byte("three")))
+	err = donut.PutObject("foo", "obj3", three, nil)
+	c.Assert(err, IsNil)
+	obj3, size, err := donut.GetObject("foo", "obj3")
+	c.Assert(err, IsNil)
+	c.Assert(size, Equals, int64(len([]byte("three"))))
+	var readerBuffer3 bytes.Buffer
+	_, err = io.CopyN(&readerBuffer3, obj3, size)
+	c.Assert(err, IsNil)
+	c.Assert(readerBuffer3.Bytes(), DeepEquals, []byte("three"))
+	listObjects, _, isTruncated, err = donut.ListObjects("foo", "o", "", "", 2)
+	c.Assert(err, IsNil)
+	c.Assert(isTruncated, Equals, true)
+	c.Assert(len(listObjects), Equals, 2)
 }

View File

@@ -70,25 +70,31 @@ func (d donut) ListObjects(bucket, prefix, marker, delimiter string, maxkeys int
 	}
 	var actualObjects []string
-	var commonPrefixes []string
+	var actualPrefixes []string
 	var isTruncated bool
 	if strings.TrimSpace(delimiter) != "" {
 		actualObjects = filterDelimited(donutObjects, delimiter)
-		commonPrefixes = filterNotDelimited(donutObjects, delimiter)
-		commonPrefixes = extractDir(commonPrefixes, delimiter)
-		commonPrefixes = uniqueObjects(commonPrefixes)
+		actualPrefixes = filterNotDelimited(donutObjects, delimiter)
+		actualPrefixes = extractDir(actualPrefixes, delimiter)
+		actualPrefixes = uniqueObjects(actualPrefixes)
 	} else {
 		actualObjects = donutObjects
 	}
 	var results []string
+	var commonPrefixes []string
 	for _, objectName := range actualObjects {
 		if len(results) >= maxkeys {
 			isTruncated = true
 			break
 		}
-		results = append(results, prefix+objectName)
+		results = appendUniq(results, prefix+objectName)
 	}
+	for _, commonPrefix := range actualPrefixes {
+		commonPrefixes = appendUniq(commonPrefixes, prefix+commonPrefix)
+	}
 	sort.Strings(results)
 	sort.Strings(commonPrefixes)
 	return results, commonPrefixes, isTruncated, nil
 }
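
Note on the listing flow above: the sketch below is a minimal, self-contained illustration, not donut's actual code. listObjects here is a hypothetical stand-in, and a simple strings.Contains check stands in for the package's filterDelimited / filterNotDelimited / extractDir / uniqueObjects helpers; object names are assumed to already have the request prefix stripped, as they are at this point in ListObjects. It shows how delimited names become common prefixes with the prefix re-attached, how appendUniq deduplicates results, and how maxkeys drives isTruncated.

package main

import (
	"fmt"
	"sort"
	"strings"
)

// appendUniq mirrors the helper introduced in this commit: append i only if
// it is not already present in slice.
func appendUniq(slice []string, i string) []string {
	for _, ele := range slice {
		if ele == i {
			return slice
		}
	}
	return append(slice, i)
}

// listObjects is a simplified, hypothetical stand-in for donut.ListObjects.
func listObjects(names []string, prefix, delimiter string, maxkeys int) (results, commonPrefixes []string, isTruncated bool) {
	var plain, delimited []string
	for _, name := range names {
		if strings.TrimSpace(delimiter) != "" && strings.Contains(name, delimiter) {
			// Stand-in for filterNotDelimited + extractDir: keep the leading
			// "directory" segment, terminated by the delimiter.
			delimited = append(delimited, strings.Split(name, delimiter)[0]+delimiter)
		} else {
			plain = append(plain, name)
		}
	}
	for _, name := range plain {
		if len(results) >= maxkeys {
			isTruncated = true
			break
		}
		results = appendUniq(results, prefix+name)
	}
	for _, dir := range delimited {
		commonPrefixes = appendUniq(commonPrefixes, prefix+dir)
	}
	sort.Strings(results)
	sort.Strings(commonPrefixes)
	return results, commonPrefixes, isTruncated
}

func main() {
	// Illustrative names under prefix "o", already stripped of that prefix.
	names := []string{"bj1", "bj2", "ne/deep/obj"}

	results, prefixes, truncated := listObjects(names, "o", "/", 10)
	fmt.Println(results, prefixes, truncated) // [obj1 obj2] [one/] false

	// With maxkeys smaller than the number of matching objects, the listing truncates.
	results, _, truncated = listObjects(names, "o", "", 1)
	fmt.Println(results, truncated) // [obj1] true
}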

View File

@@ -8,6 +8,15 @@ import (
 	"strings"
 )
+func appendUniq(slice []string, i string) []string {
+	for _, ele := range slice {
+		if ele == i {
+			return slice
+		}
+	}
+	return append(slice, i)
+}
 func filterPrefix(objects []string, prefix string) []string {
 	var results []string
 	for _, object := range objects {
@@ -50,7 +59,7 @@ func extractDir(objects []string, delim string) []string {
 	var results []string
 	for _, object := range objects {
 		parts := strings.Split(object, delim)
-		results = append(results, parts[0]+"/")
+		results = append(results, parts[0]+delim)
 	}
 	return results
 }
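
The two helpers above are small enough to exercise directly. The following runnable sketch (sample values are illustrative only) copies appendUniq and the fixed extractDir verbatim, to show why the extracted directory must end with the caller's delimiter rather than a hard-coded "/", and how appendUniq keeps common prefixes unique:

package main

import (
	"fmt"
	"strings"
)

func appendUniq(slice []string, i string) []string {
	for _, ele := range slice {
		if ele == i {
			return slice
		}
	}
	return append(slice, i)
}

func extractDir(objects []string, delim string) []string {
	var results []string
	for _, object := range objects {
		parts := strings.Split(object, delim)
		// Keep the leading segment and terminate it with the caller's
		// delimiter, not a hard-coded "/".
		results = append(results, parts[0]+delim)
	}
	return results
}

func main() {
	// With a non-"/" delimiter the old code produced "bj/"; the new code
	// produces "bj1", which the donut test expects as "obj1" once the "o"
	// request prefix is re-attached.
	fmt.Println(extractDir([]string{"bj1"}, "1"))                 // [bj1]
	fmt.Println(extractDir([]string{"this/is/also/object"}, "/")) // [this/]

	// appendUniq avoids duplicates when the same directory shows up twice.
	var prefixes []string
	for _, dir := range extractDir([]string{"a/x", "a/y", "b/z"}, "/") {
		prefixes = appendUniq(prefixes, dir)
	}
	fmt.Println(prefixes) // [a/ b/]
}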

View File

@@ -11,7 +11,7 @@
 		},
 		{
 			"ImportPath": "github.com/minio-io/donut",
-			"Rev": "2841ca4edd6a31fce7184027e85d2bee673b49f0"
+			"Rev": "1adb050ccbc6b56caa8b29502adf43592d16adbd"
 		},
 		{
 			"ImportPath": "github.com/minio-io/erasure",

View File

@@ -154,7 +154,7 @@ func testPaging(c *check.C, create func() Driver) {
 		objects, resources, err = drivers.ListObjects("bucket", resources)
 		c.Assert(err, check.IsNil)
 		c.Assert(len(objects), check.Equals, 1)
-		c.Assert(resources.CommonPrefixes[0], check.Equals, "also/")
+		c.Assert(resources.CommonPrefixes[0], check.Equals, "this/is/also/")
 	}
 	time.Sleep(time.Second)

View File

@@ -40,11 +40,7 @@ func (file *fileDriver) filterDelimiterPrefix(bucket, name, fname, delimitedName
 			return drivers.ObjectMetadata{}, resources, drivers.EmbedError(bucket, "", err)
 		}
 	case delimitedName != "":
-		if delimitedName == resources.Delimiter {
-			resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, resources.Prefix+delimitedName)
-		} else {
-			resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, delimitedName)
-		}
+		resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, resources.Prefix+delimitedName)
 	}
 	return metadata, resources, nil
 }

View File

@@ -193,11 +193,7 @@ func (memory memoryDriver) filterDelimiterPrefix(keys []string, key, delimitedNa
 	case key == resources.Prefix+delimitedName:
 		keys = appendUniq(keys, key)
 	case delimitedName != "":
-		if delimitedName == resources.Delimiter {
-			resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, resources.Prefix+delimitedName)
-		} else {
-			resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, delimitedName)
-		}
+		resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, resources.Prefix+delimitedName)
 	}
 	return resources, keys
 }
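
With this change the file and memory drivers build CommonPrefixes the same way: the delimited name is always reported under the request prefix, which is what the updated testPaging expectation ("this/is/also/" instead of "also/") verifies. A tiny sketch with made-up request values:

package main

import "fmt"

func main() {
	// Hypothetical request: listing with prefix "this/is/" and delimiter "/".
	prefix := "this/is/"
	delimitedName := "also/"

	// Old behavior: when delimitedName differed from the delimiter it was
	// reported bare, yielding "also/".
	old := delimitedName
	// New behavior: the request prefix is always prepended, yielding "this/is/also/".
	now := prefix + delimitedName

	fmt.Println(old, now)
}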