fs/erasure: Rename meta 'md5Sum' as 'etag'. (#4319)
This PR also bumps the backend format version from 1.0.0 to 1.0.1. Backward-compatible code is kept so the legacy 'md5Sum' key can still be read, but all new objects store the same details under 'etag'. Fixes #4312
This commit is contained in: parent c63afabc9b, commit 155a90403a
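The backward-compatibility path is easiest to see in isolation: when reading object metadata, the reader prefers the legacy 'md5Sum' key written by format 1.0.0 and falls back to the new 'etag' key, and both keys are stripped before the remaining metadata is echoed back as user-defined headers. A minimal, self-contained Go sketch of that idea follows; it mirrors the extractETag/cleanMetadata helpers added in the diff below, but the main function and the inlined key check are illustrative, not the repository's exact code:

package main

import "fmt"

// extractETag prefers the legacy "md5Sum" key (format 1.0.0) and falls
// back to the new "etag" key (format 1.0.1).
func extractETag(meta map[string]string) string {
	if etag, ok := meta["md5Sum"]; ok {
		return etag
	}
	return meta["etag"]
}

// cleanMetadata returns a copy of meta with the given keys removed, so the
// etag keys never show up as X-Amz-Meta-* / X-Minio-* response headers.
func cleanMetadata(meta map[string]string, keys ...string) map[string]string {
	out := make(map[string]string, len(meta))
	for k, v := range meta {
		skip := false
		for _, key := range keys {
			if k == key {
				skip = true
				break
			}
		}
		if !skip {
			out[k] = v
		}
	}
	return out
}

func main() {
	// Metadata as written by an old (1.0.0) fs.json.
	old := map[string]string{
		"md5Sum":       "467886be95c8ecfd71a2900e3f461b4f",
		"content-type": "text/plain",
	}
	fmt.Println(extractETag(old))                     // 467886be95c8ecfd71a2900e3f461b4f
	fmt.Println(cleanMetadata(old, "md5Sum", "etag")) // map[content-type:text/plain]
}

New objects write only the 'etag' key, so the fallback only matters while pre-1.0.1 fs.json files are still on disk.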
@@ -62,8 +62,8 @@ func setObjectHeaders(w http.ResponseWriter, objInfo ObjectInfo, contentRange *h
  w.Header().Set("Last-Modified", lastModified)

  // Set Etag if available.
- if objInfo.MD5Sum != "" {
- w.Header().Set("ETag", "\""+objInfo.MD5Sum+"\"")
+ if objInfo.ETag != "" {
+ w.Header().Set("ETag", "\""+objInfo.ETag+"\"")
  }

  // Set all other user defined metadata.
@@ -321,8 +321,8 @@ func generateListObjectsV1Response(bucket, prefix, marker, delimiter string, max
  }
  content.Key = object.Name
  content.LastModified = object.ModTime.UTC().Format(timeFormatAMZLong)
- if object.MD5Sum != "" {
- content.ETag = "\"" + object.MD5Sum + "\""
+ if object.ETag != "" {
+ content.ETag = "\"" + object.ETag + "\""
  }
  content.Size = object.Size
  content.StorageClass = globalMinioDefaultStorageClass
@@ -370,8 +370,8 @@ func generateListObjectsV2Response(bucket, prefix, token, startAfter, delimiter
  }
  content.Key = object.Name
  content.LastModified = object.ModTime.UTC().Format(timeFormatAMZLong)
- if object.MD5Sum != "" {
- content.ETag = "\"" + object.MD5Sum + "\""
+ if object.ETag != "" {
+ content.ETag = "\"" + object.ETag + "\""
  }
  content.Size = object.Size
  content.StorageClass = globalMinioDefaultStorageClass
@@ -49,7 +49,7 @@ func runPutObjectBenchmark(b *testing.B, obj ObjectLayer, objSize int) {
  // generate md5sum for the generated data.
  // md5sum of the data to written is required as input for PutObject.
  metadata := make(map[string]string)
- metadata["md5Sum"] = getMD5Hash(textData)
+ metadata["etag"] = getMD5Hash(textData)
  sha256sum := ""
  // benchmark utility which helps obtain number of allocations and bytes allocated per ops.
  b.ReportAllocs()
@@ -61,8 +61,8 @@ func runPutObjectBenchmark(b *testing.B, obj ObjectLayer, objSize int) {
  if err != nil {
  b.Fatal(err)
  }
- if objInfo.MD5Sum != metadata["md5Sum"] {
- b.Fatalf("Write no: %d: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", i+1, objInfo.MD5Sum, metadata["md5Sum"])
+ if objInfo.ETag != metadata["etag"] {
+ b.Fatalf("Write no: %d: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", i+1, objInfo.ETag, metadata["etag"])
  }
  }
  // Benchmark ends here. Stop timer.
@@ -85,15 +85,15 @@ func runPutObjectPartBenchmark(b *testing.B, obj ObjectLayer, partSize int) {

  objSize := 128 * humanize.MiByte

- // PutObjectPart returns md5Sum of the object inserted.
- // md5Sum variable is assigned with that value.
- var md5Sum, uploadID string
+ // PutObjectPart returns etag of the object inserted.
+ // etag variable is assigned with that value.
+ var etag, uploadID string
  // get text data generated for number of bytes equal to object size.
  textData := generateBytesData(objSize)
  // generate md5sum for the generated data.
  // md5sum of the data to written is required as input for NewMultipartUpload.
  metadata := make(map[string]string)
- metadata["md5Sum"] = getMD5Hash(textData)
+ metadata["etag"] = getMD5Hash(textData)
  sha256sum := ""
  uploadID, err = obj.NewMultipartUpload(bucket, object, metadata)
  if err != nil {
@@ -115,14 +115,14 @@ func runPutObjectPartBenchmark(b *testing.B, obj ObjectLayer, partSize int) {
  textPartData = textData[j*partSize:]
  }
  metadata := make(map[string]string)
- metadata["md5Sum"] = getMD5Hash([]byte(textPartData))
+ metadata["etag"] = getMD5Hash([]byte(textPartData))
  var partInfo PartInfo
- partInfo, err = obj.PutObjectPart(bucket, object, uploadID, j, int64(len(textPartData)), bytes.NewBuffer(textPartData), metadata["md5Sum"], sha256sum)
+ partInfo, err = obj.PutObjectPart(bucket, object, uploadID, j, int64(len(textPartData)), bytes.NewBuffer(textPartData), metadata["etag"], sha256sum)
  if err != nil {
  b.Fatal(err)
  }
- if partInfo.ETag != metadata["md5Sum"] {
- b.Fatalf("Write no: %d: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", i+1, md5Sum, metadata["md5Sum"])
+ if partInfo.ETag != metadata["etag"] {
+ b.Fatalf("Write no: %d: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", i+1, etag, metadata["etag"])
  }
  }
  }
@@ -208,19 +208,19 @@ func runGetObjectBenchmark(b *testing.B, obj ObjectLayer, objSize int) {
  for i := 0; i < 10; i++ {
  // get text data generated for number of bytes equal to object size.
  textData := generateBytesData(objSize)
- // generate md5sum for the generated data.
- // md5sum of the data to written is required as input for PutObject.
+ // generate etag for the generated data.
+ // etag of the data to written is required as input for PutObject.
  // PutObject is the functions which writes the data onto the FS/XL backend.
  metadata := make(map[string]string)
- metadata["md5Sum"] = getMD5Hash(textData)
+ metadata["etag"] = getMD5Hash(textData)
  // insert the object.
  var objInfo ObjectInfo
  objInfo, err = obj.PutObject(bucket, "object"+strconv.Itoa(i), int64(len(textData)), bytes.NewBuffer(textData), metadata, sha256sum)
  if err != nil {
  b.Fatal(err)
  }
- if objInfo.MD5Sum != metadata["md5Sum"] {
- b.Fatalf("Write no: %d: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", i+1, objInfo.MD5Sum, metadata["md5Sum"])
+ if objInfo.ETag != metadata["etag"] {
+ b.Fatalf("Write no: %d: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", i+1, objInfo.ETag, metadata["etag"])
  }
  }

@@ -317,7 +317,7 @@ func runPutObjectBenchmarkParallel(b *testing.B, obj ObjectLayer, objSize int) {
  // generate md5sum for the generated data.
  // md5sum of the data to written is required as input for PutObject.
  metadata := make(map[string]string)
- metadata["md5Sum"] = getMD5Hash([]byte(textData))
+ metadata["etag"] = getMD5Hash([]byte(textData))
  sha256sum := ""
  // benchmark utility which helps obtain number of allocations and bytes allocated per ops.
  b.ReportAllocs()
@@ -332,8 +332,8 @@ func runPutObjectBenchmarkParallel(b *testing.B, obj ObjectLayer, objSize int) {
  if err != nil {
  b.Fatal(err)
  }
- if objInfo.MD5Sum != metadata["md5Sum"] {
- b.Fatalf("Write no: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", objInfo.MD5Sum, metadata["md5Sum"])
+ if objInfo.ETag != metadata["etag"] {
+ b.Fatalf("Write no: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", objInfo.ETag, metadata["etag"])
  }
  i++
  }
@@ -367,7 +367,7 @@ func runGetObjectBenchmarkParallel(b *testing.B, obj ObjectLayer, objSize int) {
  // md5sum of the data to written is required as input for PutObject.
  // PutObject is the functions which writes the data onto the FS/XL backend.
  metadata := make(map[string]string)
- metadata["md5Sum"] = getMD5Hash([]byte(textData))
+ metadata["etag"] = getMD5Hash([]byte(textData))
  sha256sum := ""
  // insert the object.
  var objInfo ObjectInfo
@@ -375,8 +375,8 @@ func runGetObjectBenchmarkParallel(b *testing.B, obj ObjectLayer, objSize int) {
  if err != nil {
  b.Fatal(err)
  }
- if objInfo.MD5Sum != metadata["md5Sum"] {
- b.Fatalf("Write no: %d: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", i+1, objInfo.MD5Sum, metadata["md5Sum"])
+ if objInfo.ETag != metadata["etag"] {
+ b.Fatalf("Write no: %d: Md5Sum mismatch during object write into the bucket: Expected %s, got %s", i+1, objInfo.ETag, metadata["etag"])
  }
  }

@@ -535,7 +535,7 @@ func (api objectAPIHandlers) PostPolicyBucketHandler(w http.ResponseWriter, r *h
  return
  }

- w.Header().Set("ETag", `"`+objInfo.MD5Sum+`"`)
+ w.Header().Set("ETag", `"`+objInfo.ETag+`"`)
  w.Header().Set("Location", getObjectLocation(bucket, object))

  // Get host and port from Request.RemoteAddr.
@@ -568,7 +568,7 @@ func (api objectAPIHandlers) PostPolicyBucketHandler(w http.ResponseWriter, r *h
  resp := encodeResponse(PostResponse{
  Bucket: objInfo.Bucket,
  Key: objInfo.Name,
- ETag: `"` + objInfo.MD5Sum + `"`,
+ ETag: `"` + objInfo.ETag + `"`,
  Location: getObjectLocation(objInfo.Bucket, objInfo.Name),
  })
  writeResponse(w, http.StatusCreated, resp, "application/xml")
@@ -172,7 +172,7 @@ func newNotificationEvent(event eventData) NotificationEvent {
  // For all other events we should set ETag and Size.
  nEvent.S3.Object = objectMeta{
  Key: escapedObj,
- ETag: event.ObjInfo.MD5Sum,
+ ETag: event.ObjInfo.ETag,
  Size: event.ObjInfo.Size,
  ContentType: event.ObjInfo.ContentType,
  UserDefined: event.ObjInfo.UserDefined,
@@ -33,11 +33,31 @@ import (
  "github.com/tidwall/gjson"
  )

+ // FS format, and object metadata.
  const (
+ // fs.json object metadata.
  fsMetaJSONFile = "fs.json"
+ // format.json FS format metadata.
  fsFormatJSONFile = "format.json"
  )

+ // FS metadata constants.
+ const (
+ // FS backend meta 1.0.0 version.
+ fsMetaVersion100 = "1.0.0"
+
+ // FS backend meta 1.0.1 version.
+ fsMetaVersion = "1.0.1"
+
+ // FS backend meta format.
+ fsMetaFormat = "fs"
+
+ // FS backend format version.
+ fsFormatVersion = fsFormatV2
+
+ // Add more constants here.
+ )
+
  // A fsMetaV1 represents a metadata header mapping keys to sets of values.
  type fsMetaV1 struct {
  Version string `json:"version"`
@@ -50,6 +70,19 @@ type fsMetaV1 struct {
  Parts []objectPartInfo `json:"parts,omitempty"`
  }

+ // IsValid - tells if the format is sane by validating the version
+ // string and format style.
+ func (m fsMetaV1) IsValid() bool {
+ return isFSMetaValid(m.Version, m.Format)
+ }
+
+ // Verifies if the backend format metadata is sane by validating
+ // the version string and format style.
+ func isFSMetaValid(version, format string) bool {
+ return ((version == fsMetaVersion || version == fsMetaVersion100) &&
+ format == fsMetaFormat)
+ }
+
  // Converts metadata to object info.
  func (m fsMetaV1) ToObjectInfo(bucket, object string, fi os.FileInfo) ObjectInfo {
  if len(m.Meta) == 0 {
@@ -78,17 +111,15 @@ func (m fsMetaV1) ToObjectInfo(bucket, object string, fi os.FileInfo) ObjectInfo
  objInfo.IsDir = fi.IsDir()
  }

- objInfo.MD5Sum = m.Meta["md5Sum"]
+ // Extract etag from metadata.
+ objInfo.ETag = extractETag(m.Meta)
  objInfo.ContentType = m.Meta["content-type"]
  objInfo.ContentEncoding = m.Meta["content-encoding"]

- // md5Sum has already been extracted into objInfo.MD5Sum. We
- // need to remove it from m.Meta to avoid it from appearing as
- // part of response headers. e.g, X-Minio-* or X-Amz-*.
- delete(m.Meta, "md5Sum")
-
- // Save all the other userdefined API.
- objInfo.UserDefined = m.Meta
+ // etag/md5Sum has already been extracted. We need to
+ // remove to avoid it from appearing as part of
+ // response headers. e.g, X-Minio-* or X-Amz-*.
+ objInfo.UserDefined = cleanMetaETag(m.Meta)

  // Success..
  return objInfo
@@ -207,6 +238,12 @@ func (m *fsMetaV1) ReadFrom(lk *lock.LockedFile) (n int64, err error) {
  // obtain format.
  m.Format = parseFSFormat(fsMetaBuf)

+ // Verify if the format is valid, return corrupted format
+ // for unrecognized formats.
+ if !isFSMetaValid(m.Version, m.Format) {
+ return 0, traceError(errCorruptedFormat)
+ }
+
  // obtain metadata.
  m.Meta = parseFSMetaMap(fsMetaBuf)

@@ -220,20 +257,6 @@ func (m *fsMetaV1) ReadFrom(lk *lock.LockedFile) (n int64, err error) {
  return int64(len(fsMetaBuf)), nil
  }

- // FS metadata constants.
- const (
- // FS backend meta version.
- fsMetaVersion = "1.0.0"
-
- // FS backend meta format.
- fsMetaFormat = "fs"
-
- // FS backend format version.
- fsFormatVersion = fsFormatV2
-
- // Add more constants here.
- )
-
  // FS format version strings.
  const (
  fsFormatV1 = "1" // Previous format.
@@ -110,10 +110,10 @@ func TestWriteFSMetadata(t *testing.T) {
  if err != nil {
  t.Fatal("Unexpected error ", err)
  }
- if fsMeta.Version != "1.0.0" {
+ if fsMeta.Version != fsMetaVersion {
  t.Fatalf("Unexpected version %s", fsMeta.Version)
  }
- if fsMeta.Format != "fs" {
+ if fsMeta.Format != fsMetaFormat {
  t.Fatalf("Unexpected format %s", fsMeta.Format)
  }
  }
@@ -871,7 +871,7 @@ func (fs fsObjects) CompleteMultipartUpload(bucket string, object string, upload
  if len(fsMeta.Meta) == 0 {
  fsMeta.Meta = make(map[string]string)
  }
- fsMeta.Meta["md5Sum"] = s3MD5
+ fsMeta.Meta["etag"] = s3MD5

  // Write all the set metadata.
  if _, err = fsMeta.WriteTo(metaFile); err != nil {
cmd/fs-v1.go
@@ -712,12 +712,12 @@ func (fs fsObjects) PutObject(bucket string, object string, size int64, data io.

  newMD5Hex := hex.EncodeToString(md5Writer.Sum(nil))
  // Update the md5sum if not set with the newly calculated one.
- if len(metadata["md5Sum"]) == 0 {
- metadata["md5Sum"] = newMD5Hex
+ if len(metadata["etag"]) == 0 {
+ metadata["etag"] = newMD5Hex
  }

  // md5Hex representation.
- md5Hex := metadata["md5Sum"]
+ md5Hex := metadata["etag"]
  if md5Hex != "" {
  if newMD5Hex != md5Hex {
  // Returns md5 mismatch.
@@ -849,8 +849,12 @@ func (fs fsObjects) getObjectETag(bucket, entry string) (string, error) {
  }
  }

- fsMetaMap := parseFSMetaMap(fsMetaBuf)
- return fsMetaMap["md5Sum"], nil
+ // Check if FS metadata is valid, if not return error.
+ if !isFSMetaValid(parseFSVersion(fsMetaBuf), parseFSFormat(fsMetaBuf)) {
+ return "", toObjectErr(traceError(errCorruptedFormat), bucket, entry)
+ }
+
+ return extractETag(parseFSMetaMap(fsMetaBuf)), nil
  }

  // ListObjects - list all objects at prefix upto maxKeys., optionally delimited by '/'. Maintains the list pool
@@ -901,8 +905,8 @@ func (fs fsObjects) ListObjects(bucket, prefix, marker, delimiter string, maxKey
  // Protect reading `fs.json`.
  objectLock := globalNSMutex.NewNSLock(bucket, entry)
  objectLock.RLock()
- var md5Sum string
- md5Sum, err = fs.getObjectETag(bucket, entry)
+ var etag string
+ etag, err = fs.getObjectETag(bucket, entry)
  objectLock.RUnlock()
  if err != nil {
  return ObjectInfo{}, err
@@ -922,7 +926,7 @@ func (fs fsObjects) ListObjects(bucket, prefix, marker, delimiter string, maxKey
  Size: fi.Size(),
  ModTime: fi.ModTime(),
  IsDir: fi.IsDir(),
- MD5Sum: md5Sum,
+ ETag: etag,
  }, nil
  }

@@ -244,7 +244,7 @@ func TestFSMigrateObjectWithObjects(t *testing.T) {
  fsPath1 := pathJoin(bucketMetaPrefix, "testvolume1", "my-object1", fsMetaJSONFile)
  fsPath1 = pathJoin(disk, minioMetaBucket, fsPath1)

- fsMetaJSON := `{"version":"1.0.0","format":"fs","minio":{"release":"DEVELOPMENT.2017-03-27T02-26-33Z"},"meta":{"md5Sum":"467886be95c8ecfd71a2900e3f461b4f"}`
+ fsMetaJSON := `{"version":"1.0.0","format":"fs","minio":{"release":"DEVELOPMENT.2017-03-27T02-26-33Z"},"meta":{"etag":"467886be95c8ecfd71a2900e3f461b4f"}`
  if _, err = fsCreateFile(fsPath1, bytes.NewReader([]byte(fsMetaJSON)), nil, 0); err != nil {
  t.Fatal(err)
  }
@@ -253,7 +253,7 @@ func TestFSMigrateObjectWithObjects(t *testing.T) {
  fsPath2 := pathJoin(bucketMetaPrefix, "testvolume2", "my-object2", fsMetaJSONFile)
  fsPath2 = pathJoin(disk, minioMetaBucket, fsPath2)

- fsMetaJSON = `{"version":"1.0.0","format":"fs","minio":{"release":"DEVELOPMENT.2017-03-27T02-26-33Z"},"meta":{"md5Sum":"467886be95c8ecfd71a2900eff461b4d"}`
+ fsMetaJSON = `{"version":"1.0.0","format":"fs","minio":{"release":"DEVELOPMENT.2017-03-27T02-26-33Z"},"meta":{"etag":"467886be95c8ecfd71a2900eff461b4d"}`
  if _, err = fsCreateFile(fsPath2, bytes.NewReader([]byte(fsMetaJSON)), nil, 0); err != nil {
  t.Fatal(err)
  }
@@ -127,7 +127,7 @@ func (a AzureObjects) AnonGetObjectInfo(bucket, object string) (objInfo ObjectIn
  objInfo.UserDefined["Content-Encoding"] = resp.Header.Get("Content-Encoding")
  }
  objInfo.UserDefined["Content-Type"] = resp.Header.Get("Content-Type")
- objInfo.MD5Sum = resp.Header.Get("Etag")
+ objInfo.ETag = resp.Header.Get("Etag")
  objInfo.ModTime = t
  objInfo.Name = object
  objInfo.Size = contentLength
@@ -182,7 +182,7 @@ func (a AzureObjects) AnonListObjects(bucket, prefix, marker, delimiter string,
  Name: object.Name,
  ModTime: t,
  Size: object.Properties.ContentLength,
- MD5Sum: object.Properties.Etag,
+ ETag: object.Properties.Etag,
  ContentType: object.Properties.ContentType,
  ContentEncoding: object.Properties.ContentEncoding,
  })
@@ -235,7 +235,7 @@ func (a AzureObjects) ListObjects(bucket, prefix, marker, delimiter string, maxK
  Name: object.Name,
  ModTime: t,
  Size: object.Properties.ContentLength,
- MD5Sum: canonicalizeETag(object.Properties.Etag),
+ ETag: canonicalizeETag(object.Properties.Etag),
  ContentType: object.Properties.ContentType,
  ContentEncoding: object.Properties.ContentEncoding,
  })
@@ -285,7 +285,7 @@ func (a AzureObjects) GetObjectInfo(bucket, object string) (objInfo ObjectInfo,
  objInfo = ObjectInfo{
  Bucket: bucket,
  UserDefined: make(map[string]string),
- MD5Sum: canonicalizeETag(prop.Etag),
+ ETag: canonicalizeETag(prop.Etag),
  ModTime: t,
  Name: object,
  Size: prop.ContentLength,
@@ -319,7 +319,7 @@ func (a AzureObjects) PutObject(bucket, object string, size int64, data io.Reade
  teeReader = io.TeeReader(data, sha256Writer)
  }

- delete(metadata, "md5Sum")
+ delete(metadata, "etag")

  err = a.client.CreateBlockBlobFromReader(bucket, object, uint64(size), teeReader, canonicalMetadata(metadata))
  if err != nil {
@@ -234,7 +234,7 @@ func (api gatewayAPIHandlers) PutObjectHandler(w http.ResponseWriter, r *http.Re
  }

  // Make sure we hex encode md5sum here.
- metadata["md5Sum"] = hex.EncodeToString(md5Bytes)
+ metadata["etag"] = hex.EncodeToString(md5Bytes)

  sha256sum := ""

@@ -282,7 +282,7 @@ func (api gatewayAPIHandlers) PutObjectHandler(w http.ResponseWriter, r *http.Re
  return
  }

- w.Header().Set("ETag", "\""+objInfo.MD5Sum+"\"")
+ w.Header().Set("ETag", "\""+objInfo.ETag+"\"")
  writeSuccessResponseHeadersOnly(w)
  }

@@ -36,13 +36,13 @@ func (l *s3Gateway) AnonPutObject(bucket string, object string, size int64, data
  }

  var md5sumBytes []byte
- md5sum := metadata["md5Sum"]
+ md5sum := metadata["etag"]
  if md5sum != "" {
  md5sumBytes, err = hex.DecodeString(md5sum)
  if err != nil {
  return ObjectInfo{}, s3ToObjectError(traceError(err), bucket, object)
  }
- delete(metadata, "md5Sum")
+ delete(metadata, "etag")
  }

  oi, err := l.anonClient.PutObject(bucket, object, size, data, md5sumBytes, sha256sumBytes, toMinioClientMetadata(metadata))
@@ -295,7 +295,7 @@ func fromMinioClientObjectInfo(bucket string, oi minio.ObjectInfo) ObjectInfo {
  Name: oi.Key,
  ModTime: oi.LastModified,
  Size: oi.Size,
- MD5Sum: oi.ETag,
+ ETag: oi.ETag,
  UserDefined: userDefined,
  ContentType: oi.ContentType,
  ContentEncoding: oi.Metadata.Get("Content-Encoding"),
@@ -326,13 +326,13 @@ func (l *s3Gateway) PutObject(bucket string, object string, size int64, data io.
  }

  var md5sumBytes []byte
- md5sum := metadata["md5Sum"]
+ md5sum := metadata["etag"]
  if md5sum != "" {
  md5sumBytes, err = hex.DecodeString(md5sum)
  if err != nil {
  return ObjectInfo{}, s3ToObjectError(traceError(err), bucket, object)
  }
- delete(metadata, "md5Sum")
+ delete(metadata, "etag")
  }

  oi, err := l.Client.PutObject(bucket, object, size, data, md5sumBytes, sha256sumBytes, toMinioClientMetadata(metadata))
@@ -134,7 +134,7 @@ func getRedirectPostRawQuery(objInfo ObjectInfo) string {
  redirectValues := make(url.Values)
  redirectValues.Set("bucket", objInfo.Bucket)
  redirectValues.Set("key", objInfo.Name)
- redirectValues.Set("etag", "\""+objInfo.MD5Sum+"\"")
+ redirectValues.Set("etag", "\""+objInfo.ETag+"\"")
  return redirectValues.Encode()
  }

@@ -35,8 +35,8 @@ const (
  // Objects meta prefix.
  objectMetaPrefix = "objects"

- // Md5Sum of empty string.
- emptyStrMd5Sum = "d41d8cd98f00b204e9800998ecf8427e"
+ // ETag (hex encoded md5sum) of empty string.
+ emptyETag = "d41d8cd98f00b204e9800998ecf8427e"
  )

  // Global object layer mutex, used for safely updating object layer.
@@ -68,10 +68,10 @@ func dirObjectInfo(bucket, object string, size int64, metadata map[string]string
  // This is a special case with size as '0' and object ends with
  // a slash separator, we treat it like a valid operation and
  // return success.
- md5Sum := metadata["md5Sum"]
- delete(metadata, "md5Sum")
- if md5Sum == "" {
- md5Sum = emptyStrMd5Sum
+ etag := metadata["etag"]
+ delete(metadata, "etag")
+ if etag == "" {
+ etag = emptyETag
  }

  return ObjectInfo{
@@ -81,7 +81,7 @@ func dirObjectInfo(bucket, object string, size int64, metadata map[string]string
  ContentType: "application/octet-stream",
  IsDir: true,
  Size: size,
- MD5Sum: md5Sum,
+ ETag: etag,
  UserDefined: metadata,
  }
  }
@@ -101,8 +101,8 @@ type ObjectInfo struct {
  // IsDir indicates if the object is prefix.
  IsDir bool

- // Hex encoded md5 checksum of the object.
- MD5Sum string
+ // Hex encoded unique entity tag of the object.
+ ETag string

  // A standard MIME type describing the format of the object.
  ContentType string
@@ -549,7 +549,7 @@ func testListObjects(obj ObjectLayer, instanceType string, t TestErrHandler) {
  if testCase.result.Objects[j].Name != result.Objects[j].Name {
  t.Errorf("Test %d: %s: Expected object name to be \"%s\", but found \"%s\" instead", i+1, instanceType, testCase.result.Objects[j].Name, result.Objects[j].Name)
  }
- if result.Objects[j].MD5Sum == "" {
+ if result.Objects[j].ETag == "" {
  t.Errorf("Test %d: %s: Expected md5sum to be not empty, but found empty instead", i+1, instanceType)
  }

@@ -1930,7 +1930,7 @@ func testObjectCompleteMultipartUpload(obj ObjectLayer, instanceType string, t T
  if actualErr == nil && testCase.shouldPass {

  // Asserting IsTruncated.
- if actualResult.MD5Sum != testCase.expectedS3MD5 {
+ if actualResult.ETag != testCase.expectedS3MD5 {
  t.Errorf("Test %d: %s: Expected the result to be \"%v\", but found it to \"%v\"", i+1, instanceType, testCase.expectedS3MD5, actualResult)
  }
  }
@@ -29,7 +29,7 @@ import (
  )

  func md5Header(data []byte) map[string]string {
- return map[string]string{"md5Sum": getMD5Hash([]byte(data))}
+ return map[string]string{"etag": getMD5Hash([]byte(data))}
  }

  // Wrapper for calling PutObject tests for both XL multiple disks and single node setup.
@@ -94,29 +94,29 @@ func testObjectAPIPutObject(obj ObjectLayer, instanceType string, t TestErrHandl

  // Test case - 7.
  // Input to replicate Md5 mismatch.
- {bucket, object, []byte(""), map[string]string{"md5Sum": "a35"}, "", 0, "",
+ {bucket, object, []byte(""), map[string]string{"etag": "a35"}, "", 0, "",
  BadDigest{ExpectedMD5: "a35", CalculatedMD5: "d41d8cd98f00b204e9800998ecf8427e"}},

  // Test case - 8.
  // With incorrect sha256.
- {bucket, object, []byte("abcd"), map[string]string{"md5Sum": "e2fc714c4727ee9395f324cd2e7f331f"}, "incorrect-sha256", int64(len("abcd")), "", SHA256Mismatch{}},
+ {bucket, object, []byte("abcd"), map[string]string{"etag": "e2fc714c4727ee9395f324cd2e7f331f"}, "incorrect-sha256", int64(len("abcd")), "", SHA256Mismatch{}},

  // Test case - 9.
  // Input with size more than the size of actual data inside the reader.
- {bucket, object, []byte("abcd"), map[string]string{"md5Sum": "a35"}, "", int64(len("abcd") + 1), "",
+ {bucket, object, []byte("abcd"), map[string]string{"etag": "a35"}, "", int64(len("abcd") + 1), "",
  IncompleteBody{}},

  // Test case - 10.
  // Input with size less than the size of actual data inside the reader.
- {bucket, object, []byte("abcd"), map[string]string{"md5Sum": "a35"}, "", int64(len("abcd") - 1), "",
+ {bucket, object, []byte("abcd"), map[string]string{"etag": "a35"}, "", int64(len("abcd") - 1), "",
  BadDigest{ExpectedMD5: "a35", CalculatedMD5: "900150983cd24fb0d6963f7d28e17f72"}},

  // Test case - 11-14.
  // Validating for success cases.
- {bucket, object, []byte("abcd"), map[string]string{"md5Sum": "e2fc714c4727ee9395f324cd2e7f331f"}, "", int64(len("abcd")), "", nil},
- {bucket, object, []byte("efgh"), map[string]string{"md5Sum": "1f7690ebdd9b4caf8fab49ca1757bf27"}, "", int64(len("efgh")), "", nil},
- {bucket, object, []byte("ijkl"), map[string]string{"md5Sum": "09a0877d04abf8759f99adec02baf579"}, "", int64(len("ijkl")), "", nil},
- {bucket, object, []byte("mnop"), map[string]string{"md5Sum": "e132e96a5ddad6da8b07bba6f6131fef"}, "", int64(len("mnop")), "", nil},
+ {bucket, object, []byte("abcd"), map[string]string{"etag": "e2fc714c4727ee9395f324cd2e7f331f"}, "", int64(len("abcd")), "", nil},
+ {bucket, object, []byte("efgh"), map[string]string{"etag": "1f7690ebdd9b4caf8fab49ca1757bf27"}, "", int64(len("efgh")), "", nil},
+ {bucket, object, []byte("ijkl"), map[string]string{"etag": "09a0877d04abf8759f99adec02baf579"}, "", int64(len("ijkl")), "", nil},
+ {bucket, object, []byte("mnop"), map[string]string{"etag": "e132e96a5ddad6da8b07bba6f6131fef"}, "", int64(len("mnop")), "", nil},

  // Test case 15-17.
  // With no metadata
@@ -169,8 +169,8 @@ func testObjectAPIPutObject(obj ObjectLayer, instanceType string, t TestErrHandl
  // Test passes as expected, but the output values are verified for correctness here.
  if actualErr == nil {
  // Asserting whether the md5 output is correct.
- if expectedMD5, ok := testCase.inputMeta["md5Sum"]; ok && expectedMD5 != objInfo.MD5Sum {
- t.Errorf("Test %d: %s: Calculated Md5 different from the actual one %s.", i+1, instanceType, objInfo.MD5Sum)
+ if expectedMD5, ok := testCase.inputMeta["etag"]; ok && expectedMD5 != objInfo.ETag {
+ t.Errorf("Test %d: %s: Calculated Md5 different from the actual one %s.", i+1, instanceType, objInfo.ETag)
  }
  }
  }
@@ -220,10 +220,10 @@ func testObjectAPIPutObjectDiskNotFound(obj ObjectLayer, instanceType string, di
  expectedError error
  }{
  // Validating for success cases.
- {bucket, object, []byte("abcd"), map[string]string{"md5Sum": "e2fc714c4727ee9395f324cd2e7f331f"}, int64(len("abcd")), true, "", nil},
- {bucket, object, []byte("efgh"), map[string]string{"md5Sum": "1f7690ebdd9b4caf8fab49ca1757bf27"}, int64(len("efgh")), true, "", nil},
- {bucket, object, []byte("ijkl"), map[string]string{"md5Sum": "09a0877d04abf8759f99adec02baf579"}, int64(len("ijkl")), true, "", nil},
- {bucket, object, []byte("mnop"), map[string]string{"md5Sum": "e132e96a5ddad6da8b07bba6f6131fef"}, int64(len("mnop")), true, "", nil},
+ {bucket, object, []byte("abcd"), map[string]string{"etag": "e2fc714c4727ee9395f324cd2e7f331f"}, int64(len("abcd")), true, "", nil},
+ {bucket, object, []byte("efgh"), map[string]string{"etag": "1f7690ebdd9b4caf8fab49ca1757bf27"}, int64(len("efgh")), true, "", nil},
+ {bucket, object, []byte("ijkl"), map[string]string{"etag": "09a0877d04abf8759f99adec02baf579"}, int64(len("ijkl")), true, "", nil},
+ {bucket, object, []byte("mnop"), map[string]string{"etag": "e132e96a5ddad6da8b07bba6f6131fef"}, int64(len("mnop")), true, "", nil},
  }

  sha256sum := ""
@@ -246,8 +246,8 @@ func testObjectAPIPutObjectDiskNotFound(obj ObjectLayer, instanceType string, di
  // Test passes as expected, but the output values are verified for correctness here.
  if actualErr == nil && testCase.shouldPass {
  // Asserting whether the md5 output is correct.
- if testCase.inputMeta["md5Sum"] != objInfo.MD5Sum {
- t.Errorf("Test %d: %s: Calculated Md5 different from the actual one %s.", i+1, instanceType, objInfo.MD5Sum)
+ if testCase.inputMeta["etag"] != objInfo.ETag {
+ t.Errorf("Test %d: %s: Calculated Md5 different from the actual one %s.", i+1, instanceType, objInfo.ETag)
  }
  }
  }
@@ -271,7 +271,7 @@ func testObjectAPIPutObjectDiskNotFound(obj ObjectLayer, instanceType string, di
  bucket,
  object,
  []byte("mnop"),
- map[string]string{"md5Sum": "e132e96a5ddad6da8b07bba6f6131fef"},
+ map[string]string{"etag": "e132e96a5ddad6da8b07bba6f6131fef"},
  int64(len("mnop")),
  false,
  "",
@@ -187,6 +187,35 @@ func getCompleteMultipartMD5(parts []completePart) (string, error) {
  return s3MD5, nil
  }

+ // Clean meta etag keys 'md5Sum', 'etag'.
+ func cleanMetaETag(metadata map[string]string) map[string]string {
+ return cleanMetadata(metadata, "md5Sum", "etag")
+ }
+
+ // Clean metadata takes keys to be filtered
+ // and returns a new map with the keys filtered.
+ func cleanMetadata(metadata map[string]string, keyNames ...string) map[string]string {
+ var newMeta = make(map[string]string)
+ for k, v := range metadata {
+ if contains(keyNames, k) {
+ continue
+ }
+ newMeta[k] = v
+ }
+ return newMeta
+ }
+
+ // Extracts etag value from the metadata.
+ func extractETag(metadata map[string]string) string {
+ // md5Sum tag is kept for backward compatibility.
+ etag, ok := metadata["md5Sum"]
+ if !ok {
+ etag = metadata["etag"]
+ }
+ // Success.
+ return etag
+ }
+
  // Prefix matcher string matches prefix in a platform specific way.
  // For example on windows since its case insensitive we are supposed
  // to do case insensitive checks.
@@ -59,8 +59,8 @@ func checkCopyObjectPreconditions(w http.ResponseWriter, r *http.Request, objInf
  // set object-related metadata headers
  w.Header().Set("Last-Modified", objInfo.ModTime.UTC().Format(http.TimeFormat))

- if objInfo.MD5Sum != "" {
- w.Header().Set("ETag", "\""+objInfo.MD5Sum+"\"")
+ if objInfo.ETag != "" {
+ w.Header().Set("ETag", "\""+objInfo.ETag+"\"")
  }
  }
  // x-amz-copy-source-if-modified-since: Return the object only if it has been modified
@@ -95,7 +95,7 @@ func checkCopyObjectPreconditions(w http.ResponseWriter, r *http.Request, objInf
  // same as the one specified; otherwise return a 412 (precondition failed).
  ifMatchETagHeader := r.Header.Get("x-amz-copy-source-if-match")
  if ifMatchETagHeader != "" {
- if objInfo.MD5Sum != "" && !isETagEqual(objInfo.MD5Sum, ifMatchETagHeader) {
+ if objInfo.ETag != "" && !isETagEqual(objInfo.ETag, ifMatchETagHeader) {
  // If the object ETag does not match with the specified ETag.
  writeHeaders()
  writeErrorResponse(w, ErrPreconditionFailed, r.URL)
@@ -107,7 +107,7 @@ func checkCopyObjectPreconditions(w http.ResponseWriter, r *http.Request, objInf
  // one specified otherwise, return a 304 (not modified).
  ifNoneMatchETagHeader := r.Header.Get("x-amz-copy-source-if-none-match")
  if ifNoneMatchETagHeader != "" {
- if objInfo.MD5Sum != "" && isETagEqual(objInfo.MD5Sum, ifNoneMatchETagHeader) {
+ if objInfo.ETag != "" && isETagEqual(objInfo.ETag, ifNoneMatchETagHeader) {
  // If the object ETag matches with the specified ETag.
  writeHeaders()
  writeErrorResponse(w, ErrPreconditionFailed, r.URL)
@@ -144,8 +144,8 @@ func checkPreconditions(w http.ResponseWriter, r *http.Request, objInfo ObjectIn
  // set object-related metadata headers
  w.Header().Set("Last-Modified", objInfo.ModTime.UTC().Format(http.TimeFormat))

- if objInfo.MD5Sum != "" {
- w.Header().Set("ETag", "\""+objInfo.MD5Sum+"\"")
+ if objInfo.ETag != "" {
+ w.Header().Set("ETag", "\""+objInfo.ETag+"\"")
  }
  }
  // If-Modified-Since : Return the object only if it has been modified since the specified time,
@@ -180,7 +180,7 @@ func checkPreconditions(w http.ResponseWriter, r *http.Request, objInfo ObjectIn
  // otherwise return a 412 (precondition failed).
  ifMatchETagHeader := r.Header.Get("If-Match")
  if ifMatchETagHeader != "" {
- if !isETagEqual(objInfo.MD5Sum, ifMatchETagHeader) {
+ if !isETagEqual(objInfo.ETag, ifMatchETagHeader) {
  // If the object ETag does not match with the specified ETag.
  writeHeaders()
  writeErrorResponse(w, ErrPreconditionFailed, r.URL)
@@ -192,7 +192,7 @@ func checkPreconditions(w http.ResponseWriter, r *http.Request, objInfo ObjectIn
  // one specified otherwise, return a 304 (not modified).
  ifNoneMatchETagHeader := r.Header.Get("If-None-Match")
  if ifNoneMatchETagHeader != "" {
- if isETagEqual(objInfo.MD5Sum, ifNoneMatchETagHeader) {
+ if isETagEqual(objInfo.ETag, ifNoneMatchETagHeader) {
  // If the object ETag matches with the specified ETag.
  writeHeaders()
  w.WriteHeader(http.StatusNotModified)
@@ -360,10 +360,8 @@ func (api objectAPIHandlers) CopyObjectHandler(w http.ResponseWriter, r *http.Re

  defaultMeta := objInfo.UserDefined

- // Make sure to remove saved md5sum, object might have been uploaded
- // as multipart which doesn't have a standard md5sum, we just let
- // CopyObject calculate a new one.
- delete(defaultMeta, "md5Sum")
+ // Make sure to remove saved etag, CopyObject calculates a new one.
+ delete(defaultMeta, "etag")

  newMetadata := getCpObjMetadataFromHeader(r.Header, defaultMeta)
  // Check if x-amz-metadata-directive was not set to REPLACE and source,
@@ -383,8 +381,7 @@ func (api objectAPIHandlers) CopyObjectHandler(w http.ResponseWriter, r *http.Re
  return
  }

- md5Sum := objInfo.MD5Sum
- response := generateCopyObjectResponse(md5Sum, objInfo.ModTime)
+ response := generateCopyObjectResponse(objInfo.ETag, objInfo.ModTime)
  encodedSuccessResponse := encodeResponse(response)

  // Write success response.
@@ -482,7 +479,7 @@ func (api objectAPIHandlers) PutObjectHandler(w http.ResponseWriter, r *http.Req
  }

  // Make sure we hex encode md5sum here.
- metadata["md5Sum"] = hex.EncodeToString(md5Bytes)
+ metadata["etag"] = hex.EncodeToString(md5Bytes)

  sha256sum := ""

@@ -540,7 +537,7 @@ func (api objectAPIHandlers) PutObjectHandler(w http.ResponseWriter, r *http.Req
  writeErrorResponse(w, toAPIErrorCode(err), r.URL)
  return
  }
- w.Header().Set("ETag", "\""+objInfo.MD5Sum+"\"")
+ w.Header().Set("ETag", "\""+objInfo.ETag+"\"")
  writeSuccessResponseHeadersOnly(w)

  // Get host and port from Request.RemoteAddr.
@@ -965,7 +962,7 @@ func (api objectAPIHandlers) CompleteMultipartUploadHandler(w http.ResponseWrite
  // Get object location.
  location := getLocation(r)
  // Generate complete multipart response.
- response := generateCompleteMultpartUploadResponse(bucket, object, location, objInfo.MD5Sum)
+ response := generateCompleteMultpartUploadResponse(bucket, object, location, objInfo.ETag)
  encodedSuccessResponse := encodeResponse(response)
  if err != nil {
  errorIf(err, "Unable to parse CompleteMultipartUpload response")
@@ -974,7 +971,7 @@ func (api objectAPIHandlers) CompleteMultipartUploadHandler(w http.ResponseWrite
  }

  // Set etag.
- w.Header().Set("ETag", "\""+objInfo.MD5Sum+"\"")
+ w.Header().Set("ETag", "\""+objInfo.ETag+"\"")

  // Write success response.
  writeSuccessResponseXML(w, encodedSuccessResponse)
@@ -104,14 +104,14 @@ func testMultipartObjectCreation(obj ObjectLayer, instanceType string, c TestErr
  data := bytes.Repeat([]byte("0123456789abcdef"), 5*humanize.MiByte/16)
  completedParts := completeMultipartUpload{}
  for i := 1; i <= 10; i++ {
- expectedMD5Sumhex := getMD5Hash(data)
+ expectedETaghex := getMD5Hash(data)

  var calcPartInfo PartInfo
- calcPartInfo, err = obj.PutObjectPart("bucket", "key", uploadID, i, int64(len(data)), bytes.NewBuffer(data), expectedMD5Sumhex, "")
+ calcPartInfo, err = obj.PutObjectPart("bucket", "key", uploadID, i, int64(len(data)), bytes.NewBuffer(data), expectedETaghex, "")
  if err != nil {
  c.Errorf("%s: <ERROR> %s", instanceType, err)
  }
- if calcPartInfo.ETag != expectedMD5Sumhex {
+ if calcPartInfo.ETag != expectedETaghex {
  c.Errorf("MD5 Mismatch")
  }
  completedParts.Parts = append(completedParts.Parts, completePart{
@@ -123,7 +123,7 @@ func testMultipartObjectCreation(obj ObjectLayer, instanceType string, c TestErr
  if err != nil {
  c.Fatalf("%s: <ERROR> %s", instanceType, err)
  }
- if objInfo.MD5Sum != "7d364cb728ce42a74a96d22949beefb2-10" {
+ if objInfo.ETag != "7d364cb728ce42a74a96d22949beefb2-10" {
  c.Errorf("Md5 mismtch")
  }
  }
@@ -153,18 +153,18 @@ func testMultipartObjectAbort(obj ObjectLayer, instanceType string, c TestErrHan
  randomString = randomString + strconv.Itoa(num)
  }

- expectedMD5Sumhex := getMD5Hash([]byte(randomString))
+ expectedETaghex := getMD5Hash([]byte(randomString))

- metadata["md5"] = expectedMD5Sumhex
+ metadata["md5"] = expectedETaghex
  var calcPartInfo PartInfo
- calcPartInfo, err = obj.PutObjectPart("bucket", "key", uploadID, i, int64(len(randomString)), bytes.NewBufferString(randomString), expectedMD5Sumhex, "")
+ calcPartInfo, err = obj.PutObjectPart("bucket", "key", uploadID, i, int64(len(randomString)), bytes.NewBufferString(randomString), expectedETaghex, "")
  if err != nil {
  c.Fatalf("%s: <ERROR> %s", instanceType, err)
  }
- if calcPartInfo.ETag != expectedMD5Sumhex {
+ if calcPartInfo.ETag != expectedETaghex {
  c.Errorf("Md5 Mismatch")
  }
- parts[i] = expectedMD5Sumhex
+ parts[i] = expectedETaghex
  }
  err = obj.AbortMultipartUpload("bucket", "key", uploadID)
  if err != nil {
@@ -191,18 +191,18 @@ func testMultipleObjectCreation(obj ObjectLayer, instanceType string, c TestErrH
  randomString = randomString + strconv.Itoa(num)
  }

- expectedMD5Sumhex := getMD5Hash([]byte(randomString))
+ expectedETaghex := getMD5Hash([]byte(randomString))

  key := "obj" + strconv.Itoa(i)
  objects[key] = []byte(randomString)
  metadata := make(map[string]string)
- metadata["md5Sum"] = expectedMD5Sumhex
+ metadata["etag"] = expectedETaghex
  var objInfo ObjectInfo
  objInfo, err = obj.PutObject("bucket", key, int64(len(randomString)), bytes.NewBufferString(randomString), metadata, "")
  if err != nil {
  c.Fatalf("%s: <ERROR> %s", instanceType, err)
  }
- if objInfo.MD5Sum != expectedMD5Sumhex {
+ if objInfo.ETag != expectedETaghex {
  c.Errorf("Md5 Mismatch")
  }
  }
@@ -1151,7 +1151,7 @@ func (s *TestSuiteCommon) TestPutObject(c *C) {
  c.Assert(err, IsNil)
  c.Assert(response.StatusCode, Equals, http.StatusOK)
  // The response Etag header should contain Md5sum of an empty string.
- c.Assert(response.Header.Get("Etag"), Equals, "\""+emptyStrMd5Sum+"\"")
+ c.Assert(response.Header.Get("Etag"), Equals, "\""+emptyETag+"\"")
  }

  // TestListBuckets - Make request for listing of all buckets.
@@ -1841,7 +1841,7 @@ func (s *TestSuiteCommon) TestGetObjectLarge11MiB(c *C) {
  getContent, err := ioutil.ReadAll(response.Body)
  c.Assert(err, IsNil)

- // Get md5Sum of the response content.
+ // Get etag of the response content.
  getMD5 := getMD5Hash(getContent)

  // Compare putContent and getContent.
@ -2505,8 +2505,8 @@ func (s *TestSuiteCommon) TestObjectValidMD5(c *C) {
|
|||||||
// Create a byte array of 5MB.
|
// Create a byte array of 5MB.
|
||||||
// content for the object to be uploaded.
|
// content for the object to be uploaded.
|
||||||
data := bytes.Repeat([]byte("0123456789abcdef"), 5*humanize.MiByte/16)
|
data := bytes.Repeat([]byte("0123456789abcdef"), 5*humanize.MiByte/16)
|
||||||
// calculate md5Sum of the data.
|
// calculate etag of the data.
|
||||||
md5SumBase64 := getMD5HashBase64(data)
|
etagBase64 := getMD5HashBase64(data)
|
||||||
|
|
||||||
buffer1 := bytes.NewReader(data)
|
buffer1 := bytes.NewReader(data)
|
||||||
objectName := "test-1-object"
|
objectName := "test-1-object"
|
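The two test helpers in these hunks differ only in how they encode the same MD5 digest: getMD5Hash produces the lowercase hex form that ends up as the object's etag, while getMD5HashBase64 produces the base64 form that the Content-Md5 request header expects. A minimal sketch, assuming both are thin wrappers over crypto/md5 (their real definitions live elsewhere in the test utilities):

package main

import (
    "crypto/md5"
    "encoding/base64"
    "encoding/hex"
    "fmt"
)

// getMD5Hash returns the hex-encoded MD5 digest, the form stored as the
// object's etag metadata and echoed back in the ETag response header.
func getMD5Hash(data []byte) string {
    sum := md5.Sum(data)
    return hex.EncodeToString(sum[:])
}

// getMD5HashBase64 returns the base64-encoded MD5 digest, the form the
// Content-Md5 request header expects.
func getMD5HashBase64(data []byte) string {
    sum := md5.Sum(data)
    return base64.StdEncoding.EncodeToString(sum[:])
}

func main() {
    data := []byte("hello")
    fmt.Println(getMD5Hash(data))       // 5d41402abc4b2a76b9719d911017c592
    fmt.Println(getMD5HashBase64(data)) // XUFAKrxLKna5cZ2REBfFkg==
}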
@@ -2515,7 +2515,7 @@ func (s *TestSuiteCommon) TestObjectValidMD5(c *C) {
 		int64(buffer1.Len()), buffer1, s.accessKey, s.secretKey, s.signer)
 	c.Assert(err, IsNil)
 	// set the Content-Md5 to be the hash to content.
-	request.Header.Set("Content-Md5", md5SumBase64)
+	request.Header.Set("Content-Md5", etagBase64)
 	client = http.Client{Transport: s.transport}
 	response, err = client.Do(request)
 	c.Assert(err, IsNil)
@@ -2578,14 +2578,14 @@ func (s *TestSuiteCommon) TestObjectMultipart(c *C) {
 	// content for the part to be uploaded.
 	// Create a byte array of 5MB.
 	data := bytes.Repeat([]byte("0123456789abcdef"), 5*humanize.MiByte/16)
-	// calculate md5Sum of the data.
+	// calculate etag of the data.
 	md5SumBase64 := getMD5HashBase64(data)
 
 	buffer1 := bytes.NewReader(data)
 	// HTTP request for the part to be uploaded.
 	request, err = newTestSignedRequest("PUT", getPartUploadURL(s.endPoint, bucketName, objectName, uploadID, "1"),
 		int64(buffer1.Len()), buffer1, s.accessKey, s.secretKey, s.signer)
-	// set the Content-Md5 header to the base64 encoding the md5Sum of the content.
+	// set the Content-Md5 header to the base64 encoding the etag of the content.
 	request.Header.Set("Content-Md5", md5SumBase64)
 	c.Assert(err, IsNil)
 
@@ -2599,14 +2599,14 @@ func (s *TestSuiteCommon) TestObjectMultipart(c *C) {
 	// Create a byte array of 1 byte.
 	data = []byte("0")
 
-	// calculate md5Sum of the data.
+	// calculate etag of the data.
 	md5SumBase64 = getMD5HashBase64(data)
 
 	buffer2 := bytes.NewReader(data)
 	// HTTP request for the second part to be uploaded.
 	request, err = newTestSignedRequest("PUT", getPartUploadURL(s.endPoint, bucketName, objectName, uploadID, "2"),
 		int64(buffer2.Len()), buffer2, s.accessKey, s.secretKey, s.signer)
-	// set the Content-Md5 header to the base64 encoding the md5Sum of the content.
+	// set the Content-Md5 header to the base64 encoding the etag of the content.
 	request.Header.Set("Content-Md5", md5SumBase64)
 	c.Assert(err, IsNil)
 
@@ -370,7 +370,7 @@ func testListObjectsWebHandler(obj ObjectLayer, instanceType string, t TestErrHa
 
 	data := bytes.Repeat([]byte("a"), objectSize)
 
-	_, err = obj.PutObject(bucketName, objectName, int64(len(data)), bytes.NewReader(data), map[string]string{"md5Sum": "c9a34cfc85d982698c6ac89f76071abd"}, "")
+	_, err = obj.PutObject(bucketName, objectName, int64(len(data)), bytes.NewReader(data), map[string]string{"etag": "c9a34cfc85d982698c6ac89f76071abd"}, "")
 
 	if err != nil {
 		t.Fatalf("Was not able to upload an object, %v", err)
@@ -465,14 +465,14 @@ func testRemoveObjectWebHandler(obj ObjectLayer, instanceType string, t TestErrH
 	data := bytes.Repeat([]byte("a"), objectSize)
 
 	_, err = obj.PutObject(bucketName, objectName, int64(len(data)), bytes.NewReader(data),
-		map[string]string{"md5Sum": "c9a34cfc85d982698c6ac89f76071abd"}, "")
+		map[string]string{"etag": "c9a34cfc85d982698c6ac89f76071abd"}, "")
 	if err != nil {
 		t.Fatalf("Was not able to upload an object, %v", err)
 	}
 
 	objectName = "a/object"
 	_, err = obj.PutObject(bucketName, objectName, int64(len(data)), bytes.NewReader(data),
-		map[string]string{"md5Sum": "c9a34cfc85d982698c6ac89f76071abd"}, "")
+		map[string]string{"etag": "c9a34cfc85d982698c6ac89f76071abd"}, "")
 	if err != nil {
 		t.Fatalf("Was not able to upload an object, %v", err)
 	}
@@ -788,7 +788,7 @@ func testDownloadWebHandler(obj ObjectLayer, instanceType string, t TestErrHandl
 	}
 
 	content := []byte("temporary file's content")
-	_, err = obj.PutObject(bucketName, objectName, int64(len(content)), bytes.NewReader(content), map[string]string{"md5Sum": "01ce59706106fe5e02e7f55fffda7f34"}, "")
+	_, err = obj.PutObject(bucketName, objectName, int64(len(content)), bytes.NewReader(content), map[string]string{"etag": "01ce59706106fe5e02e7f55fffda7f34"}, "")
 	if err != nil {
 		t.Fatalf("Was not able to upload an object, %v", err)
 	}
@@ -940,7 +940,7 @@ func testWebPresignedGetHandler(obj ObjectLayer, instanceType string, t TestErrH
 	}
 
 	data := bytes.Repeat([]byte("a"), objectSize)
-	_, err = obj.PutObject(bucketName, objectName, int64(len(data)), bytes.NewReader(data), map[string]string{"md5Sum": "c9a34cfc85d982698c6ac89f76071abd"}, "")
+	_, err = obj.PutObject(bucketName, objectName, int64(len(data)), bytes.NewReader(data), map[string]string{"etag": "c9a34cfc85d982698c6ac89f76071abd"}, "")
 	if err != nil {
 		t.Fatalf("Was not able to upload an object, %v", err)
 	}
@@ -146,7 +146,10 @@ type xlMetaV1 struct {
 // XL metadata constants.
 const (
 	// XL meta version.
-	xlMetaVersion = "1.0.0"
+	xlMetaVersion = "1.0.1"
 
+	// XL meta version.
+	xlMetaVersion100 = "1.0.0"
+
 	// XL meta format string.
 	xlMetaFormat = "xl"
@@ -173,7 +176,38 @@ func newXLMetaV1(object string, dataBlocks, parityBlocks int) (xlMeta xlMetaV1)
 // IsValid - tells if the format is sane by validating the version
 // string and format style.
 func (m xlMetaV1) IsValid() bool {
-	return m.Version == xlMetaVersion && m.Format == xlMetaFormat
+	return isXLMetaValid(m.Version, m.Format)
+}
+
+// Verifies if the backend format metadata is sane by validating
+// the version string and format style.
+func isXLMetaValid(version, format string) bool {
+	return ((version == xlMetaVersion || version == xlMetaVersion100) &&
+		format == xlMetaFormat)
+}
+
+// Converts metadata to object info.
+func (m xlMetaV1) ToObjectInfo(bucket, object string) ObjectInfo {
+	objInfo := ObjectInfo{
+		IsDir:           false,
+		Bucket:          bucket,
+		Name:            object,
+		Size:            m.Stat.Size,
+		ModTime:         m.Stat.ModTime,
+		ContentType:     m.Meta["content-type"],
+		ContentEncoding: m.Meta["content-encoding"],
+	}
+
+	// Extract etag from metadata.
+	objInfo.ETag = extractETag(m.Meta)
+
+	// etag/md5Sum has already been extracted. We need to
+	// remove to avoid it from appearing as part of
+	// response headers. e.g, X-Minio-* or X-Amz-*.
+	objInfo.UserDefined = cleanMetaETag(m.Meta)
+
+	// Success.
+	return objInfo
 }
 
 // objectPartIndex - returns the index of matching object part number.
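The new ToObjectInfo leans on two helpers this diff does not show, extractETag and cleanMetaETag. Judging from the "etag/md5Sum" comment above and from isXLMetaValid accepting both the 1.0.0 and 1.0.1 formats, they presumably read the new 'etag' key with a fallback to the legacy 'md5Sum' key, and then strip both spellings from the user-defined metadata. The bodies below are a hedged sketch of that intent, not the repository's actual code (the package name is also assumed):

package cmd // package name assumed for illustration

// extractETag returns the etag, falling back to the legacy 'md5Sum' key
// written by the older backend format. Illustrative sketch only.
func extractETag(metadata map[string]string) string {
    if etag, ok := metadata["etag"]; ok {
        return etag
    }
    // Objects written before the rename still carry 'md5Sum'.
    return metadata["md5Sum"]
}

// cleanMetaETag removes both spellings of the key so that neither leaks
// into user-defined metadata (X-Minio-* / X-Amz-* response headers).
// Illustrative sketch only.
func cleanMetaETag(metadata map[string]string) map[string]string {
    delete(metadata, "etag")
    delete(metadata, "md5Sum")
    return metadata
}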
@@ -973,7 +973,7 @@ func (xl xlObjects) CompleteMultipartUpload(bucket string, object string, upload
 	xlMeta.Stat.ModTime = UTCNow()
 
 	// Save successfully calculated md5sum.
-	xlMeta.Meta["md5Sum"] = s3MD5
+	xlMeta.Meta["etag"] = s3MD5
 	uploadIDPath = path.Join(bucket, object, uploadID)
 	tempUploadIDPath := uploadID
 
@@ -1061,7 +1061,7 @@ func (xl xlObjects) CompleteMultipartUpload(bucket string, object string, upload
 		Name:            object,
 		Size:            xlMeta.Stat.Size,
 		ModTime:         xlMeta.Stat.ModTime,
-		MD5Sum:          xlMeta.Meta["md5Sum"],
+		ETag:            xlMeta.Meta["etag"],
 		ContentType:     xlMeta.Meta["content-type"],
 		ContentEncoding: xlMeta.Meta["content-encoding"],
 		UserDefined:     xlMeta.Meta,
@@ -101,23 +101,7 @@ func (xl xlObjects) CopyObject(srcBucket, srcObject, dstBucket, dstObject string
 		if err = renameXLMetadata(onlineDisks, minioMetaTmpBucket, tempObj, srcBucket, srcObject, xl.writeQuorum); err != nil {
 			return ObjectInfo{}, toObjectErr(err, srcBucket, srcObject)
 		}
-		objInfo := ObjectInfo{
-			IsDir:           false,
-			Bucket:          srcBucket,
-			Name:            srcObject,
-			Size:            xlMeta.Stat.Size,
-			ModTime:         xlMeta.Stat.ModTime,
-			MD5Sum:          xlMeta.Meta["md5Sum"],
-			ContentType:     xlMeta.Meta["content-type"],
-			ContentEncoding: xlMeta.Meta["content-encoding"],
-		}
-		// md5Sum has already been extracted into objInfo.MD5Sum. We
-		// need to remove it from xlMetaMap to avoid it from appearing as
-		// part of response headers. e.g, X-Minio-* or X-Amz-*.
-		delete(xlMeta.Meta, "md5Sum")
-		objInfo.UserDefined = xlMeta.Meta
-		return objInfo, nil
+		return xlMeta.ToObjectInfo(srcBucket, srcObject), nil
 	}
 
 	// Initialize pipe.
@@ -333,10 +317,9 @@ func (xl xlObjects) GetObjectInfo(bucket, object string) (ObjectInfo, error) {
 
 // getObjectInfo - wrapper for reading object metadata and constructs ObjectInfo.
 func (xl xlObjects) getObjectInfo(bucket, object string) (objInfo ObjectInfo, err error) {
-	// returns xl meta map and stat info.
+	// Extracts xlStat and xlMetaMap.
 	xlStat, xlMetaMap, err := xl.readXLMetaStat(bucket, object)
 	if err != nil {
-		// Return error.
 		return ObjectInfo{}, err
 	}
 
@@ -346,17 +329,19 @@ func (xl xlObjects) getObjectInfo(bucket, object string) (objInfo ObjectInfo, er
 		Name:            object,
 		Size:            xlStat.Size,
 		ModTime:         xlStat.ModTime,
-		MD5Sum:          xlMetaMap["md5Sum"],
 		ContentType:     xlMetaMap["content-type"],
 		ContentEncoding: xlMetaMap["content-encoding"],
 	}
 
-	// md5Sum has already been extracted into objInfo.MD5Sum. We
-	// need to remove it from xlMetaMap to avoid it from appearing as
-	// part of response headers. e.g, X-Minio-* or X-Amz-*.
+	// Extract etag.
+	objInfo.ETag = extractETag(xlMetaMap)
 
-	delete(xlMetaMap, "md5Sum")
-	objInfo.UserDefined = xlMetaMap
+	// etag/md5Sum has already been extracted. We need to
+	// remove to avoid it from appearing as part of
+	// response headers. e.g, X-Minio-* or X-Amz-*.
+	objInfo.UserDefined = cleanMetaETag(xlMetaMap)
+
+	// Success.
 	return objInfo, nil
 }
 
@@ -650,8 +635,8 @@ func (xl xlObjects) PutObject(bucket string, object string, size int64, data io.
 
 	newMD5Hex := hex.EncodeToString(md5Writer.Sum(nil))
 	// Update the md5sum if not set with the newly calculated one.
-	if len(metadata["md5Sum"]) == 0 {
-		metadata["md5Sum"] = newMD5Hex
+	if len(metadata["etag"]) == 0 {
+		metadata["etag"] = newMD5Hex
 	}
 
 	// Guess content-type from the extension if possible.
@@ -664,7 +649,7 @@ func (xl xlObjects) PutObject(bucket string, object string, size int64, data io.
 	}
 
 	// md5Hex representation.
-	md5Hex := metadata["md5Sum"]
+	md5Hex := metadata["etag"]
 	if md5Hex != "" {
 		if newMD5Hex != md5Hex {
 			// Returns md5 mismatch.
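Taken together, the two PutObject hunks above follow the usual trust-but-verify flow for a caller-supplied checksum: hash the stream, adopt the computed digest as the etag when none was supplied, and reject the write when the supplied value disagrees. A self-contained sketch of the same pattern with illustrative names (putWithETag is not a function from this codebase, and the real code also writes the data to the erasure-coded disks while hashing):

package main

import (
    "crypto/md5"
    "encoding/hex"
    "errors"
    "fmt"
    "io"
    "strings"
)

// putWithETag drains the object data while hashing it, then either records
// the computed digest as the etag or rejects the write when it disagrees
// with the etag the caller claimed in metadata.
func putWithETag(data io.Reader, metadata map[string]string) (string, error) {
    md5Writer := md5.New()
    // Here we only hash the stream; the real implementation also stores it.
    if _, err := io.Copy(md5Writer, data); err != nil {
        return "", err
    }
    newMD5Hex := hex.EncodeToString(md5Writer.Sum(nil))

    // Update the etag if not set with the newly calculated one.
    if len(metadata["etag"]) == 0 {
        metadata["etag"] = newMD5Hex
    }

    // Verify the caller-supplied etag, if any.
    if md5Hex := metadata["etag"]; md5Hex != newMD5Hex {
        return "", errors.New("bad digest: md5 mismatch")
    }
    return newMD5Hex, nil
}

func main() {
    etag, err := putWithETag(strings.NewReader("hello"), map[string]string{})
    fmt.Println(etag, err) // 5d41402abc4b2a76b9719d911017c592 <nil>
}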
@@ -730,7 +715,7 @@ func (xl xlObjects) PutObject(bucket string, object string, size int64, data io.
 		Name:            object,
 		Size:            xlMeta.Stat.Size,
 		ModTime:         xlMeta.Stat.ModTime,
-		MD5Sum:          xlMeta.Meta["md5Sum"],
+		ETag:            xlMeta.Meta["etag"],
 		ContentType:     xlMeta.Meta["content-type"],
 		ContentEncoding: xlMeta.Meta["content-encoding"],
 		UserDefined:     xlMeta.Meta,
@@ -253,6 +253,19 @@ func readXLMetaStat(disk StorageAPI, bucket string, object string) (statInfo, ma
 	if err != nil {
 		return statInfo{}, nil, traceError(err)
 	}
+
+	// obtain version.
+	xlVersion := parseXLVersion(xlMetaBuf)
+
+	// obtain format.
+	xlFormat := parseXLFormat(xlMetaBuf)
+
+	// Validate if the xl.json we read is sane, return corrupted format.
+	if !isXLMetaValid(xlVersion, xlFormat) {
+		// For version mismatchs and unrecognized format, return corrupted format.
+		return statInfo{}, nil, traceError(errCorruptedFormat)
+	}
+
 	// obtain xlMetaV1{}.Meta using `github.com/tidwall/gjson`.
 	xlMetaMap := parseXLMetaMap(xlMetaBuf)
 
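parseXLVersion and parseXLFormat are referenced but not shown here. Since the neighbouring comment says the metadata map is pulled out of the raw xl.json buffer with github.com/tidwall/gjson, the version and format readers plausibly do the same with the top-level fields. A sketch under that assumption; the JSON paths and function bodies are guesses, not the repository's actual definitions:

package main

import (
    "fmt"

    "github.com/tidwall/gjson"
)

// parseXLVersion reads the top-level "version" field from a raw xl.json buffer.
func parseXLVersion(xlMetaBuf []byte) string {
    return gjson.GetBytes(xlMetaBuf, "version").String()
}

// parseXLFormat reads the top-level "format" field from a raw xl.json buffer.
func parseXLFormat(xlMetaBuf []byte) string {
    return gjson.GetBytes(xlMetaBuf, "format").String()
}

func main() {
    xlMetaBuf := []byte(`{"version":"1.0.1","format":"xl","meta":{"etag":"abc"}}`)
    fmt.Println(parseXLVersion(xlMetaBuf), parseXLFormat(xlMetaBuf)) // 1.0.1 xl
}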
@@ -261,6 +274,7 @@ func readXLMetaStat(disk StorageAPI, bucket string, object string) (statInfo, ma
 	if err != nil {
 		return statInfo{}, nil, traceError(err)
 	}
 
 	// Return structured `xl.json`.
 	return xlStat, xlMetaMap, nil
 }
@@ -5,6 +5,7 @@
         "release": "DEVELOPMENT.GOGET"
     },
     "meta": {
+        "etag": "97586a5290d4f5a41328062d6a7da593-3",
         "content-type": "binary/octet-stream",
         "content-encoding": "gzip"
     },
@@ -14,7 +14,7 @@
         }
     ],
     "meta": {
-        "md5Sum": "97586a5290d4f5a41328062d6a7da593-3",
+        "etag": "97586a5290d4f5a41328062d6a7da593-3",
         "content-type": "application\/octet-stream",
         "content-encoding": "gzip"
     },