mirror of https://github.com/minio/minio.git
Merge pull request #452 from harshavardhana/pr_out_merge_new_changes_from_minio_io_donut_minio_io_objectdriver
commit 2ddde59b46
@@ -28,7 +28,7 @@
 	},
 	{
 		"ImportPath": "github.com/minio-io/donut",
-		"Rev": "2841ca4edd6a31fce7184027e85d2bee673b49f0"
+		"Rev": "1adb050ccbc6b56caa8b29502adf43592d16adbd"
 	},
 	{
 		"ImportPath": "github.com/minio-io/erasure",

@@ -40,7 +40,7 @@
 	},
 	{
 		"ImportPath": "github.com/minio-io/objectdriver",
-		"Rev": "8173914f6082348048d175c7d08bff03adde2111"
+		"Rev": "144846812e29de66814a67b38c949f9875e0ee46"
 	},
 	{
 		"ImportPath": "github.com/stretchr/objx",

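Both pins above follow godep's manifest format. For orientation, a Godeps/Godeps.json file has roughly this shape (abridged sketch; the top-level ImportPath and GoVersion values here are illustrative, not taken from this commit):

{
	"ImportPath": "github.com/minio-io/minio",
	"GoVersion": "go1.4",
	"Deps": [
		{
			"ImportPath": "github.com/minio-io/donut",
			"Rev": "1adb050ccbc6b56caa8b29502adf43592d16adbd"
		}
	]
}
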
@@ -210,14 +210,33 @@ func (s *MySuite) TestMultipleNewObjects(c *C) {
 	_, err = io.CopyN(&readerBuffer2, obj2, size)
 	c.Assert(err, IsNil)
 	c.Assert(readerBuffer2.Bytes(), DeepEquals, []byte("two"))
 
 	// test list objects
-	listObjects, _, isTruncated, err := donut.ListObjects("foo", "o", "", "", 1)
+	listObjects, prefixes, isTruncated, err := donut.ListObjects("foo", "o", "", "1", 1)
 	c.Assert(err, IsNil)
-	c.Assert(isTruncated, Equals, true)
-	c.Assert(listObjects, DeepEquals, []string{"obj1"})
+	c.Assert(isTruncated, Equals, false)
+	c.Assert(prefixes[0], DeepEquals, "obj1")
+
+	listObjects, _, isTruncated, err = donut.ListObjects("foo", "o", "", "", 10)
+	c.Assert(err, IsNil)
+	c.Assert(isTruncated, Equals, false)
+	c.Assert(listObjects, DeepEquals, []string{"obj1", "obj2"})
+
+	three := ioutil.NopCloser(bytes.NewReader([]byte("three")))
+	err = donut.PutObject("foo", "obj3", three, nil)
+	c.Assert(err, IsNil)
+
+	obj3, size, err := donut.GetObject("foo", "obj3")
+	c.Assert(err, IsNil)
+	c.Assert(size, Equals, int64(len([]byte("three"))))
+
+	var readerBuffer3 bytes.Buffer
+	_, err = io.CopyN(&readerBuffer3, obj3, size)
+	c.Assert(err, IsNil)
+	c.Assert(readerBuffer3.Bytes(), DeepEquals, []byte("three"))
+
+	listObjects, _, isTruncated, err = donut.ListObjects("foo", "o", "", "", 2)
+	c.Assert(err, IsNil)
+	c.Assert(isTruncated, Equals, true)
+	c.Assert(len(listObjects), Equals, 2)
 }

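The assertions above pin down the paging contract of ListObjects: at most maxkeys names come back, and isTruncated reports whether anything was cut off. A minimal stand-alone sketch of that contract (listPage is a hypothetical helper distilling the behavior, not part of donut):

package main

import "fmt"

// listPage returns at most maxkeys names; isTruncated reports
// whether more names remained beyond the cut.
func listPage(all []string, maxkeys int) (page []string, isTruncated bool) {
	for _, name := range all {
		if len(page) >= maxkeys {
			return page, true
		}
		page = append(page, name)
	}
	return page, false
}

func main() {
	objects := []string{"obj1", "obj2", "obj3"}
	page, truncated := listPage(objects, 2)
	fmt.Println(page, truncated) // [obj1 obj2] true
	page, truncated = listPage(objects, 10)
	fmt.Println(page, truncated) // [obj1 obj2 obj3] false
}
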
@@ -70,25 +70,31 @@ func (d donut) ListObjects(bucket, prefix, marker, delimiter string, maxkeys int
 	}
 
 	var actualObjects []string
-	var commonPrefixes []string
+	var actualPrefixes []string
 	var isTruncated bool
 	if strings.TrimSpace(delimiter) != "" {
 		actualObjects = filterDelimited(donutObjects, delimiter)
-		commonPrefixes = filterNotDelimited(donutObjects, delimiter)
-		commonPrefixes = extractDir(commonPrefixes, delimiter)
-		commonPrefixes = uniqueObjects(commonPrefixes)
+		actualPrefixes = filterNotDelimited(donutObjects, delimiter)
+		actualPrefixes = extractDir(actualPrefixes, delimiter)
+		actualPrefixes = uniqueObjects(actualPrefixes)
 	} else {
 		actualObjects = donutObjects
 	}
 
 	var results []string
+	var commonPrefixes []string
 	for _, objectName := range actualObjects {
 		if len(results) >= maxkeys {
 			isTruncated = true
 			break
 		}
-		results = append(results, prefix+objectName)
+		results = appendUniq(results, prefix+objectName)
 	}
+	for _, commonPrefix := range actualPrefixes {
+		commonPrefixes = appendUniq(commonPrefixes, prefix+commonPrefix)
+	}
 	sort.Strings(results)
 	sort.Strings(commonPrefixes)
 	return results, commonPrefixes, isTruncated, nil
 }

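The listing pipeline above splits names into plain objects and delimited ones, reduces the delimited names to their leading directory component, and deduplicates while appending. A self-contained sketch of the same flow; the bodies of filterDelimited, filterNotDelimited, and extractDir are assumptions inferred from their call sites here (note the counterintuitive naming mirrors the source), and the request prefix is omitted for brevity:

package main

import (
	"fmt"
	"sort"
	"strings"
)

// appendUniq appends i only if it is not already in slice.
func appendUniq(slice []string, i string) []string {
	for _, ele := range slice {
		if ele == i {
			return slice
		}
	}
	return append(slice, i)
}

// filterDelimited keeps names with no delimiter: the plain objects
// at this listing level (naming follows the source's call sites).
func filterDelimited(objects []string, delim string) []string {
	var results []string
	for _, object := range objects {
		if !strings.Contains(object, delim) {
			results = append(results, object)
		}
	}
	return results
}

// filterNotDelimited keeps names that do contain the delimiter.
func filterNotDelimited(objects []string, delim string) []string {
	var results []string
	for _, object := range objects {
		if strings.Contains(object, delim) {
			results = append(results, object)
		}
	}
	return results
}

// extractDir reduces each delimited name to its first segment plus
// the delimiter (the post-fix behavior of the extractDir hunk below).
func extractDir(objects []string, delim string) []string {
	var results []string
	for _, object := range objects {
		parts := strings.Split(object, delim)
		results = append(results, parts[0]+delim)
	}
	return results
}

func main() {
	objects := []string{"obj1", "obj2", "dir/obj3", "dir/obj4"}
	actual := filterDelimited(objects, "/")
	prefixes := extractDir(filterNotDelimited(objects, "/"), "/")

	var results, commonPrefixes []string
	for _, name := range actual {
		results = appendUniq(results, name)
	}
	for _, p := range prefixes {
		commonPrefixes = appendUniq(commonPrefixes, p)
	}
	sort.Strings(results)
	sort.Strings(commonPrefixes)
	fmt.Println(results, commonPrefixes) // [obj1 obj2] [dir/]
}
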
@@ -8,6 +8,15 @@ import (
 	"strings"
 )
 
+func appendUniq(slice []string, i string) []string {
+	for _, ele := range slice {
+		if ele == i {
+			return slice
+		}
+	}
+	return append(slice, i)
+}
+
 func filterPrefix(objects []string, prefix string) []string {
 	var results []string
 	for _, object := range objects {

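appendUniq is a linear-scan set insert, adequate for the small listings involved here. Usage sketch:

package main

import "fmt"

func appendUniq(slice []string, i string) []string {
	for _, ele := range slice {
		if ele == i {
			return slice
		}
	}
	return append(slice, i)
}

func main() {
	s := []string{"a", "b"}
	s = appendUniq(s, "b") // already present: slice unchanged
	s = appendUniq(s, "c") // new element: appended
	fmt.Println(s)         // [a b c]
}
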
@@ -50,7 +59,7 @@ func extractDir(objects []string, delim string) []string {
 	var results []string
 	for _, object := range objects {
 		parts := strings.Split(object, delim)
-		results = append(results, parts[0]+"/")
+		results = append(results, parts[0]+delim)
 	}
 	return results
 }

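The one-character fix above matters whenever the delimiter is not "/": the old code split on the caller's delimiter but hard-coded a slash suffix. A small demonstration with a hypothetical ":" delimiter:

package main

import (
	"fmt"
	"strings"
)

// extractDir, post-fix: the suffix joined back on is the caller's
// delimiter, not a hard-coded "/".
func extractDir(objects []string, delim string) []string {
	var results []string
	for _, object := range objects {
		parts := strings.Split(object, delim)
		results = append(results, parts[0]+delim)
	}
	return results
}

func main() {
	// The old `parts[0]+"/"` would have produced "2015/" here
	// instead of the correct "2015:".
	fmt.Println(extractDir([]string{"2015:03:photo.png"}, ":")) // [2015:]
}
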
@@ -11,7 +11,7 @@
 	},
 	{
 		"ImportPath": "github.com/minio-io/donut",
-		"Rev": "2841ca4edd6a31fce7184027e85d2bee673b49f0"
+		"Rev": "1adb050ccbc6b56caa8b29502adf43592d16adbd"
 	},
 	{
 		"ImportPath": "github.com/minio-io/erasure",

@@ -154,7 +154,7 @@ func testPaging(c *check.C, create func() Driver) {
 		objects, resources, err = drivers.ListObjects("bucket", resources)
 		c.Assert(err, check.IsNil)
 		c.Assert(len(objects), check.Equals, 1)
-		c.Assert(resources.CommonPrefixes[0], check.Equals, "also/")
+		c.Assert(resources.CommonPrefixes[0], check.Equals, "this/is/also/")
 	}
 	time.Sleep(time.Second)

@@ -40,11 +40,7 @@ func (file *fileDriver) filterDelimiterPrefix(bucket, name, fname, delimitedName
 			return drivers.ObjectMetadata{}, resources, drivers.EmbedError(bucket, "", err)
 		}
 	case delimitedName != "":
-		if delimitedName == resources.Delimiter {
-			resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, resources.Prefix+delimitedName)
-		} else {
-			resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, delimitedName)
-		}
+		resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, resources.Prefix+delimitedName)
 	}
 	return metadata, resources, nil
 }

@@ -193,11 +193,7 @@ func (memory memoryDriver) filterDelimiterPrefix(keys []string, key, delimitedNa
 	case key == resources.Prefix+delimitedName:
 		keys = appendUniq(keys, key)
 	case delimitedName != "":
-		if delimitedName == resources.Delimiter {
-			resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, resources.Prefix+delimitedName)
-		} else {
-			resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, delimitedName)
-		}
+		resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, resources.Prefix+delimitedName)
 	}
 	return resources, keys
 }

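This hunk and the file-driver hunk above collapse the same redundant branch: both arms of the removed if/else recorded a common prefix, and the surviving form always scopes it under the request prefix. That is also why testPaging now expects "this/is/also/" rather than the bare "also/". A sketch of the collapsed accumulation (bucketResources is a hypothetical stand-in for the drivers' resources type, modeling only the fields these hunks touch):

package main

import "fmt"

// bucketResources is a hypothetical stand-in for the drivers'
// bucket resources metadata.
type bucketResources struct {
	Prefix         string
	Delimiter      string
	CommonPrefixes []string
}

func appendUniq(slice []string, i string) []string {
	for _, ele := range slice {
		if ele == i {
			return slice
		}
	}
	return append(slice, i)
}

// recordPrefix is the collapsed form: every non-empty delimited name
// is stored as Prefix+delimitedName, with no special case for the
// bare delimiter.
func recordPrefix(r *bucketResources, delimitedName string) {
	if delimitedName != "" {
		r.CommonPrefixes = appendUniq(r.CommonPrefixes, r.Prefix+delimitedName)
	}
}

func main() {
	r := &bucketResources{Prefix: "this/is/", Delimiter: "/"}
	recordPrefix(r, "also/")
	recordPrefix(r, "also/")      // duplicate: ignored
	fmt.Println(r.CommonPrefixes) // [this/is/also/]
}
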