Mirror of https://github.com/minio/minio.git (synced 2025-11-10 05:59:43 -05:00)
refactor ObjectLayer PutObject and PutObjectPart (#4925)
This change refactors the ObjectLayer PutObject and PutObjectPart functions. Instead of passing an io.Reader and a size to PUT operations, ObjectLayer now expects a HashReader. A HashReader verifies the MD5 sum (and the SHA256 sum, if required) of the object. This change updates all PutObject(Part) calls and removes unnecessary code in all ObjectLayer implementations. Fixes #4923
Committed by: Dee Koder
Parent: f8024cadbb
Commit: 79ba4d3f33
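As the diff below shows, the separate size, io.Reader, MD5, and SHA256 arguments of PutObjectPart are collapsed into a single HashReader argument. The following is only a minimal sketch of the idea behind such a reader, not MinIO's actual HashReader type; the names hashReader, newHashReader, and Verify, and the behaviour around an empty SHA256 argument, are assumptions made for illustration.

```go
// Illustrative sketch only: NOT MinIO's HashReader implementation.
// It shows a reader that hashes data as it is consumed and can verify
// the result against expected hex digests afterwards.
package main

import (
    "crypto/md5"
    "crypto/sha256"
    "encoding/hex"
    "fmt"
    "hash"
    "io"
    "strings"
)

type hashReader struct {
    src            io.Reader
    md5Hash        hash.Hash
    sha256Hash     hash.Hash // nil when SHA256 verification is not requested
    expectedMD5    string
    expectedSHA256 string
}

// newHashReader wraps src; size limits how much is read, md5Hex and sha256Hex
// are the expected digests (sha256Hex may be empty to skip SHA256 checking).
func newHashReader(src io.Reader, size int64, md5Hex, sha256Hex string) *hashReader {
    h := &hashReader{
        src:         io.LimitReader(src, size),
        md5Hash:     md5.New(),
        expectedMD5: md5Hex,
    }
    if sha256Hex != "" {
        h.sha256Hash = sha256.New()
        h.expectedSHA256 = sha256Hex
    }
    return h
}

// Read feeds every byte through the configured hashes as it is consumed.
func (h *hashReader) Read(p []byte) (int, error) {
    n, err := h.src.Read(p)
    if n > 0 {
        h.md5Hash.Write(p[:n])
        if h.sha256Hash != nil {
            h.sha256Hash.Write(p[:n])
        }
    }
    return n, err
}

// Verify compares the computed digests with the expected ones.
func (h *hashReader) Verify() error {
    if h.expectedMD5 != "" && hex.EncodeToString(h.md5Hash.Sum(nil)) != h.expectedMD5 {
        return fmt.Errorf("md5 mismatch")
    }
    if h.sha256Hash != nil && hex.EncodeToString(h.sha256Hash.Sum(nil)) != h.expectedSHA256 {
        return fmt.Errorf("sha256 mismatch")
    }
    return nil
}

func main() {
    body := "multipart chunk data"
    sum := md5.Sum([]byte(body))
    r := newHashReader(strings.NewReader(body), int64(len(body)), hex.EncodeToString(sum[:]), "")

    // An object layer would stream r to storage; here we just drain it.
    if _, err := io.Copy(io.Discard, r); err != nil {
        fmt.Println("read error:", err)
        return
    }
    fmt.Println("verify:", r.Verify()) // prints "verify: <nil>" on success
}
```

This mirrors the call pattern in the tests below: the caller constructs the reader with the data source, size, and expected checksums, and the object layer verifies the content while (or after) reading it, instead of receiving the size and checksums as separate parameters.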
@@ -218,7 +218,7 @@ func testPutObjectPartDiskNotFound(obj ObjectLayer, instanceType string, disks [
     sha256sum := ""
     // Iterating over creatPartCases to generate multipart chunks.
     for _, testCase := range createPartCases {
-        _, err = obj.PutObjectPart(testCase.bucketName, testCase.objName, testCase.uploadID, testCase.PartID, testCase.intputDataSize, bytes.NewBufferString(testCase.inputReaderData), testCase.inputMd5, sha256sum)
+        _, err = obj.PutObjectPart(testCase.bucketName, testCase.objName, testCase.uploadID, testCase.PartID, NewHashReader(bytes.NewBufferString(testCase.inputReaderData), testCase.intputDataSize, testCase.inputMd5, sha256sum))
         if err != nil {
             t.Fatalf("%s : %s", instanceType, err.Error())
         }
@@ -232,7 +232,7 @@ func testPutObjectPartDiskNotFound(obj ObjectLayer, instanceType string, disks [

     // Object part upload should fail with quorum not available.
     testCase := createPartCases[len(createPartCases)-1]
-    _, err = obj.PutObjectPart(testCase.bucketName, testCase.objName, testCase.uploadID, testCase.PartID, testCase.intputDataSize, bytes.NewBufferString(testCase.inputReaderData), testCase.inputMd5, sha256sum)
+    _, err = obj.PutObjectPart(testCase.bucketName, testCase.objName, testCase.uploadID, testCase.PartID, NewHashReader(bytes.NewBufferString(testCase.inputReaderData), testCase.intputDataSize, testCase.inputMd5, sha256sum))
     if err == nil {
         t.Fatalf("Test %s: expected to fail but passed instead", instanceType)
     }
@@ -347,7 +347,7 @@ func testObjectAPIPutObjectPart(obj ObjectLayer, instanceType string, t TestErrH

     // Validate all the test cases.
     for i, testCase := range testCases {
-        actualInfo, actualErr := obj.PutObjectPart(testCase.bucketName, testCase.objName, testCase.uploadID, testCase.PartID, testCase.intputDataSize, bytes.NewBufferString(testCase.inputReaderData), testCase.inputMd5, testCase.inputSHA256)
+        actualInfo, actualErr := obj.PutObjectPart(testCase.bucketName, testCase.objName, testCase.uploadID, testCase.PartID, NewHashReader(bytes.NewBufferString(testCase.inputReaderData), testCase.intputDataSize, testCase.inputMd5, testCase.inputSHA256))
         // All are test cases above are expected to fail.
         if actualErr != nil && testCase.shouldPass {
             t.Errorf("Test %d: %s: Expected to pass, but failed with: <ERROR> %s.", i+1, instanceType, actualErr.Error())
@@ -481,7 +481,7 @@ func testListMultipartUploads(obj ObjectLayer, instanceType string, t TestErrHan
     sha256sum := ""
     // Iterating over creatPartCases to generate multipart chunks.
     for _, testCase := range createPartCases {
-        _, err := obj.PutObjectPart(testCase.bucketName, testCase.objName, testCase.uploadID, testCase.PartID, testCase.intputDataSize, bytes.NewBufferString(testCase.inputReaderData), testCase.inputMd5, sha256sum)
+        _, err := obj.PutObjectPart(testCase.bucketName, testCase.objName, testCase.uploadID, testCase.PartID, NewHashReader(bytes.NewBufferString(testCase.inputReaderData), testCase.intputDataSize, testCase.inputMd5, sha256sum))
         if err != nil {
             t.Fatalf("%s : %s", instanceType, err.Error())
         }
@@ -1336,7 +1336,7 @@ func testListObjectPartsDiskNotFound(obj ObjectLayer, instanceType string, disks
     sha256sum := ""
     // Iterating over creatPartCases to generate multipart chunks.
     for _, testCase := range createPartCases {
-        _, err := obj.PutObjectPart(testCase.bucketName, testCase.objName, testCase.uploadID, testCase.PartID, testCase.intputDataSize, bytes.NewBufferString(testCase.inputReaderData), testCase.inputMd5, sha256sum)
+        _, err := obj.PutObjectPart(testCase.bucketName, testCase.objName, testCase.uploadID, testCase.PartID, NewHashReader(bytes.NewBufferString(testCase.inputReaderData), testCase.intputDataSize, testCase.inputMd5, sha256sum))
         if err != nil {
             t.Fatalf("%s : %s", instanceType, err.Error())
         }
@@ -1576,7 +1576,7 @@ func testListObjectParts(obj ObjectLayer, instanceType string, t TestErrHandler)
     sha256sum := ""
     // Iterating over creatPartCases to generate multipart chunks.
     for _, testCase := range createPartCases {
-        _, err := obj.PutObjectPart(testCase.bucketName, testCase.objName, testCase.uploadID, testCase.PartID, testCase.intputDataSize, bytes.NewBufferString(testCase.inputReaderData), testCase.inputMd5, sha256sum)
+        _, err := obj.PutObjectPart(testCase.bucketName, testCase.objName, testCase.uploadID, testCase.PartID, NewHashReader(bytes.NewBufferString(testCase.inputReaderData), testCase.intputDataSize, testCase.inputMd5, sha256sum))
         if err != nil {
             t.Fatalf("%s : %s", instanceType, err.Error())
         }
@@ -1825,7 +1825,7 @@ func testObjectCompleteMultipartUpload(obj ObjectLayer, instanceType string, t T
     sha256sum := ""
     // Iterating over creatPartCases to generate multipart chunks.
     for _, part := range parts {
-        _, err = obj.PutObjectPart(part.bucketName, part.objName, part.uploadID, part.PartID, part.intputDataSize, bytes.NewBufferString(part.inputReaderData), part.inputMd5, sha256sum)
+        _, err = obj.PutObjectPart(part.bucketName, part.objName, part.uploadID, part.PartID, NewHashReader(bytes.NewBufferString(part.inputReaderData), part.intputDataSize, part.inputMd5, sha256sum))
         if err != nil {
             t.Fatalf("%s : %s", instanceType, err)
         }