api: extract http headers with some supported header list. (#2268)
commit f85d94288d (parent 55cb55675c)
@@ -62,20 +62,19 @@ func setObjectHeaders(w http.ResponseWriter, objInfo ObjectInfo, contentRange *httpRange) {
     // set common headers
     setCommonHeaders(w)

-    // set object-related metadata headers
+    // Set content length.
+    w.Header().Set("Content-Length", strconv.FormatInt(objInfo.Size, 10))
+
+    // Set last modified time.
     lastModified := objInfo.ModTime.UTC().Format(http.TimeFormat)
     w.Header().Set("Last-Modified", lastModified)

-    if objInfo.ContentType != "" {
-        w.Header().Set("Content-Type", objInfo.ContentType)
-    }
+    // Set Etag if available.
     if objInfo.MD5Sum != "" {
         w.Header().Set("ETag", "\""+objInfo.MD5Sum+"\"")
     }
-    if objInfo.ContentEncoding != "" {
-        w.Header().Set("Content-Encoding", objInfo.ContentEncoding)
-    }
-    w.Header().Set("Content-Length", strconv.FormatInt(objInfo.Size, 10))
+
+    // Set all other user defined metadata.
     for k, v := range objInfo.UserDefined {
         w.Header().Set(k, v)
     }
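For context on the new shape of setObjectHeaders: once Content-Length, Last-Modified and ETag are handled explicitly, everything else (including Content-Type and Content-Encoding) is expected to arrive through objInfo.UserDefined and is copied verbatim onto the response. A minimal standalone sketch of that copy loop, using only the standard library and an httptest recorder (writeUserDefined and the sample map are made up for illustration; this is not MinIO code):

package main

import (
    "fmt"
    "net/http"
    "net/http/httptest"
)

// writeUserDefined mirrors the final loop in setObjectHeaders above: every
// stored metadata key/value pair is written verbatim as a response header.
func writeUserDefined(w http.ResponseWriter, userDefined map[string]string) {
    for k, v := range userDefined {
        w.Header().Set(k, v)
    }
}

func main() {
    // Hypothetical metadata as it might come back from the object layer.
    meta := map[string]string{
        "Content-Type":   "image/png",
        "X-Amz-Meta-App": "photo-uploader",
    }
    rec := httptest.NewRecorder()
    writeUserDefined(rec, meta)
    fmt.Println(rec.Header().Get("Content-Type"))   // image/png
    fmt.Println(rec.Header().Get("X-Amz-Meta-App")) // photo-uploader
}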
fs-v1.go (11 changed lines)
@@ -314,12 +314,15 @@ func (fs fsObjects) GetObjectInfo(bucket, object string) (ObjectInfo, error) {
         return ObjectInfo{}, toObjectErr(err, bucket, object)
     }

+    if len(fsMeta.Meta) == 0 {
+        fsMeta.Meta = make(map[string]string)
+    }
+
     // Guess content-type from the extension if possible.
-    contentType := fsMeta.Meta["content-type"]
-    if contentType == "" {
+    if fsMeta.Meta["content-type"] == "" {
         if objectExt := filepath.Ext(object); objectExt != "" {
             if content, ok := mimedb.DB[strings.ToLower(strings.TrimPrefix(objectExt, "."))]; ok {
-                contentType = content.ContentType
+                fsMeta.Meta["content-type"] = content.ContentType
             }
         }
     }
@@ -332,7 +335,7 @@ func (fs fsObjects) GetObjectInfo(bucket, object string) (ObjectInfo, error) {
         Size:            fi.Size,
         IsDir:           fi.Mode.IsDir(),
         MD5Sum:          fsMeta.Meta["md5Sum"],
-        ContentType:     contentType,
+        ContentType:     fsMeta.Meta["content-type"],
         ContentEncoding: fsMeta.Meta["content-encoding"],
         UserDefined:     fsMeta.Meta,
     }, nil
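The fs-v1.go hunks make the extension-based content-type guess write straight into fsMeta.Meta, so the guessed value also shows up in UserDefined. A rough standalone illustration of the same guessing idea, using the standard library's mime package as a stand-in for MinIO's mimedb table (guessContentType and the octet-stream default are inventions of this sketch, and the exact types returned may differ from mimedb):

package main

import (
    "fmt"
    "mime"
    "path/filepath"
    "strings"
)

// guessContentType sketches the fallback in GetObjectInfo: if no content-type
// was stored for the object, derive one from the file extension.
func guessContentType(stored map[string]string, object string) string {
    if ct := stored["content-type"]; ct != "" {
        return ct // already stored, nothing to guess
    }
    if ext := filepath.Ext(object); ext != "" {
        // mime.TypeByExtension stands in for mimedb.DB here.
        if ct := mime.TypeByExtension(strings.ToLower(ext)); ct != "" {
            return ct
        }
    }
    // Default used only in this sketch; the hunk above simply leaves it empty.
    return "application/octet-stream"
}

func main() {
    fmt.Println(guessContentType(map[string]string{}, "photos/cat.png"))                        // image/png
    fmt.Println(guessContentType(map[string]string{"content-type": "text/plain"}, "notes.bin")) // text/plain
}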
@@ -19,6 +19,7 @@ package main
 import (
     "io"
     "net/http"
+    "strings"
 )

 // Validates location constraint in PutBucket request body.
@@ -58,3 +59,39 @@ func isValidLocationConstraint(r *http.Request) (s3Error APIErrorCode) {
     }
     return s3Error
 }
+
+// Supported headers that needs to be extracted.
+var supportedHeaders = []string{
+    "content-type",
+    "cache-control",
+    "content-encoding",
+    "content-disposition",
+    // Add more supported headers here.
+}
+
+// extractMetadataFromHeader extracts metadata from HTTP header.
+func extractMetadataFromHeader(header http.Header) map[string]string {
+    metadata := make(map[string]string)
+    // Save standard supported headers.
+    for _, supportedHeader := range supportedHeaders {
+        canonicalHeader := http.CanonicalHeaderKey(supportedHeader)
+        // HTTP headers are case insensitive, look for both canonical
+        // and non canonical entries.
+        if _, ok := header[canonicalHeader]; ok {
+            metadata[supportedHeader] = header.Get(canonicalHeader)
+        } else if _, ok := header[supportedHeader]; ok {
+            metadata[supportedHeader] = header.Get(supportedHeader)
+        }
+    }
+    // Go through all other headers for any additional headers that needs to be saved.
+    for key := range header {
+        cKey := http.CanonicalHeaderKey(key)
+        if strings.HasPrefix(cKey, "X-Amz-Meta-") {
+            metadata[cKey] = header.Get(cKey)
+        } else if strings.HasPrefix(key, "X-Minio-Meta-") {
+            metadata[cKey] = header.Get(cKey)
+        }
+    }
+    // Return.
+    return metadata
+}
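A usage sketch for the new helper, assuming it compiles in the same package as extractMetadataFromHeader above (exampleExtract and the header values are made up); the expected keys follow directly from the function: lower-case names for entries in supportedHeaders, canonical X-Amz-Meta-*/X-Minio-Meta-* keys for user metadata, everything else dropped:

// Example usage, same package as extractMetadataFromHeader above.
func exampleExtract() map[string]string {
    header := http.Header{}
    header.Set("Content-Type", "image/png")        // in supportedHeaders -> "content-type"
    header.Set("Cache-Control", "max-age=3600")    // in supportedHeaders -> "cache-control"
    header.Set("X-Amz-Meta-App", "photo-uploader") // user metadata, kept under its canonical key
    header.Set("X-Ignored", "dropped")             // neither supported nor prefixed, not extracted

    // Expected result:
    //   "content-type":   "image/png"
    //   "cache-control":  "max-age=3600"
    //   "X-Amz-Meta-App": "photo-uploader"
    return extractMetadataFromHeader(header)
}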
@@ -21,6 +21,7 @@ import (
     "encoding/xml"
     "io/ioutil"
     "net/http"
+    "reflect"
     "testing"
 )

@@ -84,3 +85,36 @@ func TestIsValidLocationContraint(t *testing.T) {
         }
     }
 }
+
+// Tests validate metadata extraction from http headers.
+func TestExtractMetadataHeaders(t *testing.T) {
+    testCases := []struct {
+        header   http.Header
+        metadata map[string]string
+    }{
+        // Validate if there a known 'content-type'.
+        {
+            header: http.Header{
+                "Content-Type": []string{"image/png"},
+            },
+            metadata: map[string]string{
+                "content-type": "image/png",
+            },
+        },
+        // Validate if there are no keys to extract.
+        {
+            header: http.Header{
+                "test-1": []string{"123"},
+            },
+            metadata: map[string]string{},
+        },
+    }

+    // Validate if the extracting headers.
+    for i, testCase := range testCases {
+        metadata := extractMetadataFromHeader(testCase.header)
+        if !reflect.DeepEqual(metadata, testCase.metadata) {
+            t.Fatalf("Test %d failed: Expected \"%#v\", got \"%#v\"", i+1, testCase.metadata, metadata)
+        }
+    }
+}
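Assuming the test file belongs to the same main package as the handler utilities changed above (the earlier hunk shows a `package main` context), the new case can be run on its own with `go test -v -run TestExtractMetadataHeaders` from that directory.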
@@ -325,12 +325,8 @@ func (api objectAPIHandlers) CopyObjectHandler(w http.ResponseWriter, r *http.Request) {
     // Save metadata.
     metadata := make(map[string]string)
     // Save other metadata if available.
-    if objInfo.ContentType != "" {
-        metadata["content-type"] = objInfo.ContentType
-    }
-    if objInfo.ContentEncoding != "" {
-        metadata["content-encoding"] = objInfo.ContentEncoding
-    }
+    metadata = objInfo.UserDefined
+
     // Do not set `md5sum` as CopyObject will not keep the
     // same md5sum as the source.

@@ -392,27 +388,10 @@ func (api objectAPIHandlers) PutObjectHandler(w http.ResponseWriter, r *http.Request) {
         return
     }

-    // Save metadata.
-    metadata := make(map[string]string)
+    // Extract metadata to be saved from incoming HTTP header.
+    metadata := extractMetadataFromHeader(r.Header)
     // Make sure we hex encode md5sum here.
     metadata["md5Sum"] = hex.EncodeToString(md5Bytes)
-    // Save other metadata if available.
-    contentType := r.Header.Get("Content-Type")
-    if contentType != "" {
-        metadata["content-type"] = contentType
-    }
-    contentEncoding := r.Header.Get("Content-Encoding")
-    if contentEncoding != "" {
-        metadata["content-encoding"] = contentEncoding
-    }
-    for key := range r.Header {
-        cKey := http.CanonicalHeaderKey(key)
-        if strings.HasPrefix(cKey, "X-Amz-Meta-") {
-            metadata[cKey] = r.Header.Get(cKey)
-        } else if strings.HasPrefix(key, "X-Minio-Meta-") {
-            metadata[cKey] = r.Header.Get(cKey)
-        }
-    }

     var md5Sum string
     switch getRequestAuthType(r) {
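With PutObjectHandler delegating to extractMetadataFromHeader, any supported header or X-Amz-Meta-*/X-Minio-Meta-* header on the incoming PUT is persisted with the object. A hypothetical client-side sketch of such a request (endpoint, bucket and key are made up, and a real request would also need S3 authentication such as Signature V4):

package main

import (
    "log"
    "net/http"
    "strings"
)

func main() {
    // Hypothetical, unauthenticated PUT against a local server.
    req, err := http.NewRequest("PUT", "http://localhost:9000/mybucket/notes.txt",
        strings.NewReader("hello"))
    if err != nil {
        log.Fatal(err)
    }
    req.Header.Set("Content-Type", "text/plain") // extracted via supportedHeaders
    req.Header.Set("X-Amz-Meta-Owner", "alice")  // extracted via the X-Amz-Meta- prefix
    req.Header.Set("X-Request-Debug", "1")       // not extracted, not stored

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        log.Fatal(err)
    }
    defer resp.Body.Close()
    // Without credentials the server will reject the request; the point here
    // is only which headers the handler would pick up as object metadata.
    log.Println(resp.Status)
}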
@@ -472,25 +451,8 @@ func (api objectAPIHandlers) NewMultipartUploadHandler(w http.ResponseWriter, r *http.Request) {
         }
     }

-    // Save metadata.
-    metadata := make(map[string]string)
-    // Save other metadata if available.
-    contentType := r.Header.Get("Content-Type")
-    if contentType != "" {
-        metadata["content-type"] = contentType
-    }
-    contentEncoding := r.Header.Get("Content-Encoding")
-    if contentEncoding != "" {
-        metadata["content-encoding"] = contentEncoding
-    }
-    for key := range r.Header {
-        cKey := http.CanonicalHeaderKey(key)
-        if strings.HasPrefix(cKey, "X-Amz-Meta-") {
-            metadata[cKey] = r.Header.Get(cKey)
-        } else if strings.HasPrefix(key, "X-Minio-Meta-") {
-            metadata[cKey] = r.Header.Get(cKey)
-        }
-    }
+    // Extract metadata that needs to be saved.
+    metadata := extractMetadataFromHeader(r.Header)

     uploadID, err := api.ObjectAPI.NewMultipartUpload(bucket, object, metadata)
     if err != nil {