signature: Rewrite signature handling and move it into a library.
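For context, a minimal sketch of how a caller would drive the new signature package, using only the API this commit introduces (New, SetHTTPRequestToVerify, DoesSignatureMatch); the import path/alias, the handler wiring and the sample credentials are assumptions for illustration, not part of this diff.

```go
package main

import (
    "crypto/sha256"
    "encoding/hex"
    "io/ioutil"
    "net/http"

    signV4 "github.com/minio/minio/pkg/signature"
)

// verifyRequest is a hypothetical handler-side helper: build the verifier once
// with the server's credentials, attach the incoming request, hash the payload
// and ask the package whether the request signature matches.
func verifyRequest(w http.ResponseWriter, r *http.Request) {
    sig, perr := signV4.New("ACCESSKEYIDEXAMPLE00", "secretaccesskey-40-bytes-long-example-00", "us-east-1")
    if perr != nil {
        http.Error(w, "invalid server credentials", http.StatusInternalServerError)
        return
    }
    sig.SetHTTPRequestToVerify(r)

    // The package compares against the hex sha256 of the payload supplied by the caller.
    body, _ := ioutil.ReadAll(r.Body)
    sum := sha256.Sum256(body)
    ok, perr := sig.DoesSignatureMatch(hex.EncodeToString(sum[:]))
    if perr != nil || !ok {
        http.Error(w, "signature does not match", http.StatusForbidden)
        return
    }
    w.WriteHeader(http.StatusOK)
}

func main() {
    http.ListenAndServe(":9000", http.HandlerFunc(verifyRequest))
}
```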
@@ -23,7 +23,6 @@ import (
    "io/ioutil"
    "os"
    "path/filepath"
    "strings"
)

// File container provided for atomic file writes
@@ -82,7 +81,6 @@ func FileCreateWithPrefix(filePath string, prefix string) (*File, error) {
    if err := os.MkdirAll(filepath.Dir(filePath), 0700); err != nil {
        return nil, err
    }
    prefix = strings.TrimSpace(prefix)
    f, err := ioutil.TempFile(filepath.Dir(filePath), prefix+filepath.Base(filePath))
    if err != nil {
        return nil, err
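As a side note on the hunk above: FileCreateWithPrefix stages a write in a temp file beside the target path, and callers elsewhere in this commit either finalize it with Close or discard it with CloseAndPurge. A minimal usage sketch, assuming the pkg/atomic import path and that atomic.File is an io.Writer (both inferred from this diff, not confirmed by it); the rename-on-Close behavior is likewise inferred from the package's purpose.

```go
package main

import (
    "log"

    "github.com/minio/minio/pkg/atomic"
)

func writeAtomically(objectPath string, data []byte) error {
    // Stage the write in a temp file next to the target path; the prefix keeps
    // concurrent writes to the same object distinguishable on disk.
    file, err := atomic.FileCreateWithPrefix(objectPath, "$tmpobject")
    if err != nil {
        return err
    }
    if _, err := file.Write(data); err != nil {
        // Abort: drop the temp file without touching the target path.
        file.CloseAndPurge()
        return err
    }
    // Commit: the staged file takes the place of the target path.
    file.Close()
    return nil
}

func main() {
    if err := writeAtomically("/tmp/bucket/object", []byte("hello")); err != nil {
        log.Fatal(err)
    }
}
```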
@@ -19,7 +19,6 @@ package fs
import (
    "os"
    "regexp"
    "strings"
    "time"
    "unicode/utf8"
)
@@ -167,7 +166,7 @@ var validBucket = regexp.MustCompile(`^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$`)
// IsValidBucketName - verify bucket name in accordance with
// - http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingBucket.html
func IsValidBucketName(bucket string) bool {
    if strings.TrimSpace(bucket) == "" {
    if bucket == "" {
        return false
    }
    if len(bucket) < 3 || len(bucket) > 63 {
@@ -182,7 +181,7 @@ func IsValidBucketName(bucket string) bool {
// IsValidObjectName - verify object name in accordance with
// - http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html
func IsValidObjectName(object string) bool {
    if strings.TrimSpace(object) == "" {
    if object == "" {
        return true
    }
    if len(object) > 1024 || len(object) == 0 {
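A quick illustration of what the validBucket pattern above accepts; this only exercises the regexp shown in the hunk, while IsValidBucketName additionally enforces the 3-63 character bounds. The little program is illustrative.

```go
package main

import (
    "fmt"
    "regexp"
)

// Same pattern as validBucket above.
var validBucket = regexp.MustCompile(`^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$`)

func main() {
    for _, name := range []string{"my-bucket", "a.b-c.1", "My_Bucket", "ab"} {
        // Lowercase alphanumerics with dots and dashes pass; uppercase,
        // underscores and names shorter than three characters do not.
        fmt.Printf("%-10s %v\n", name, validBucket.MatchString(name))
    }
}
```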
@@ -18,25 +18,11 @@ package fs

import "fmt"

// MissingDateHeader date header missing
type MissingDateHeader struct{}
// SignDoesNotMatch - signature does not match.
type SignDoesNotMatch struct{}

func (e MissingDateHeader) Error() string {
    return "Missing date header"
}

// MissingExpiresQuery expires query string missing
type MissingExpiresQuery struct{}

func (e MissingExpiresQuery) Error() string {
    return "Missing expires query string"
}

// ExpiredPresignedRequest request already expired
type ExpiredPresignedRequest struct{}

func (e ExpiredPresignedRequest) Error() string {
    return "Presigned request already expired"
func (e SignDoesNotMatch) Error() string {
    return "Signature does not match."
}

// InvalidArgument invalid argument
@@ -156,30 +142,8 @@ func (e BadDigest) Error() string {
    return "Bad digest"
}

// ParityOverflow parity over flow
type ParityOverflow struct{}

func (e ParityOverflow) Error() string {
    return "Parity overflow"
}

// ChecksumMismatch checksum mismatch
type ChecksumMismatch struct{}

func (e ChecksumMismatch) Error() string {
    return "Checksum mismatch"
}

// MissingPOSTPolicy missing post policy
type MissingPOSTPolicy struct{}

func (e MissingPOSTPolicy) Error() string {
    return "Missing POST policy in multipart form"
}

// InternalError - generic internal error
type InternalError struct {
}
type InternalError struct{}

// BackendError - generic disk backend error
type BackendError struct {
@@ -237,13 +201,6 @@ type BucketNameInvalid GenericBucketError

/// Object related errors

// EntityTooLarge - object size exceeds maximum limit
type EntityTooLarge struct {
    GenericObjectError
    Size string
    MaxSize string
}

// ObjectNameInvalid - object name provided is invalid
type ObjectNameInvalid GenericObjectError

@@ -292,11 +249,6 @@ func (e ObjectNameInvalid) Error() string {
    return "Object name invalid: " + e.Bucket + "#" + e.Object
}

// Return string an error formatted as the given text
func (e EntityTooLarge) Error() string {
    return e.Bucket + "#" + e.Object + "with " + e.Size + "reached maximum allowed size limit " + e.MaxSize
}

// IncompleteBody You did not provide the number of bytes specified by the Content-Length HTTP header
type IncompleteBody GenericObjectError
@@ -68,9 +68,11 @@ func (fs Filesystem) listObjects(bucket, prefix, marker, delimiter string, maxKe
    // Bucket path prefix should always end with a separator.
    bucketPathPrefix := bucketPath + string(os.PathSeparator)
    prefixPath := bucketPathPrefix + prefix
    st, err := os.Stat(prefixPath)
    if err != nil && os.IsNotExist(err) {
        walkPath = bucketPath
    st, e := os.Stat(prefixPath)
    if e != nil {
        if os.IsNotExist(e) {
            walkPath = bucketPath
        }
    } else {
        if st.IsDir() && !strings.HasSuffix(prefix, delimiter) {
            walkPath = bucketPath
@@ -152,7 +152,7 @@ func (fs Filesystem) MakeBucket(bucket, acl string) *probe.Error {
        }
        return probe.NewError(e)
    }
    if strings.TrimSpace(acl) == "" {
    if acl == "" {
        acl = "private"
    }
@@ -232,7 +232,7 @@ func (fs Filesystem) SetBucketMetadata(bucket string, metadata map[string]string
    if !IsValidBucketACL(acl) {
        return probe.NewError(InvalidACL{ACL: acl})
    }
    if strings.TrimSpace(acl) == "" {
    if acl == "" {
        acl = "private"
    }
    bucket = fs.denormalizeBucket(bucket)
@@ -174,7 +174,15 @@ func saveParts(partPathPrefix string, mw io.Writer, parts []CompletePart) *probe
    md5Sum = strings.TrimSuffix(md5Sum, "\"")
    partFile, e := os.OpenFile(partPathPrefix+md5Sum+fmt.Sprintf("$%d-$multiparts", part.PartNumber), os.O_RDONLY, 0600)
    if e != nil {
        return probe.NewError(e)
        if !os.IsNotExist(e) {
            return probe.NewError(e)
        }
        // Some clients do not set Content-MD5, so we would have
        // created part files without 'ETag' in them.
        partFile, e = os.OpenFile(partPathPrefix+fmt.Sprintf("$%d-$multiparts", part.PartNumber), os.O_RDONLY, 0600)
        if e != nil {
            return probe.NewError(e)
        }
    }
    partReaders = append(partReaders, partFile)
    partClosers = append(partClosers, partFile)
@@ -322,9 +330,9 @@ func (fs Filesystem) CreateObjectPart(bucket, object, uploadID, expectedMD5Sum s
        return "", probe.NewError(InvalidUploadID{UploadID: uploadID})
    }

    if strings.TrimSpace(expectedMD5Sum) != "" {
    if expectedMD5Sum != "" {
        var expectedMD5SumBytes []byte
        expectedMD5SumBytes, err = base64.StdEncoding.DecodeString(strings.TrimSpace(expectedMD5Sum))
        expectedMD5SumBytes, err = base64.StdEncoding.DecodeString(expectedMD5Sum)
        if err != nil {
            // Pro-actively close the connection
            return "", probe.NewError(InvalidDigest{MD5: expectedMD5Sum})
@@ -361,8 +369,8 @@ func (fs Filesystem) CreateObjectPart(bucket, object, uploadID, expectedMD5Sum s
    md5sum := hex.EncodeToString(md5Hasher.Sum(nil))
    // Verify if the written object is equal to what is expected, only
    // if it is requested as such.
    if strings.TrimSpace(expectedMD5Sum) != "" {
        if !isMD5SumEqual(strings.TrimSpace(expectedMD5Sum), md5sum) {
    if expectedMD5Sum != "" {
        if !isMD5SumEqual(expectedMD5Sum, md5sum) {
            partFile.CloseAndPurge()
            return "", probe.NewError(BadDigest{MD5: expectedMD5Sum, Bucket: bucket, Object: object})
        }
@@ -375,7 +383,7 @@ func (fs Filesystem) CreateObjectPart(bucket, object, uploadID, expectedMD5Sum s
        }
        if !ok {
            partFile.CloseAndPurge()
            return "", probe.NewError(signV4.SigDoesNotMatch{})
            return "", probe.NewError(SignDoesNotMatch{})
        }
    }
    partFile.Close()
@@ -472,7 +480,7 @@ func (fs Filesystem) CompleteMultipartUpload(bucket, object, uploadID string, da
        }
        if !ok {
            file.CloseAndPurge()
            return ObjectMetadata{}, probe.NewError(signV4.SigDoesNotMatch{})
            return ObjectMetadata{}, probe.NewError(SignDoesNotMatch{})
        }
    }
    completeMultipartUpload := &CompleteMultipartUpload{}
@@ -178,7 +178,7 @@ func getMetadata(rootPath, bucket, object string) (ObjectMetadata, *probe.Error)
// isMD5SumEqual - returns error if md5sum mismatches, success its `nil`
func isMD5SumEqual(expectedMD5Sum, actualMD5Sum string) bool {
    // Verify the md5sum.
    if strings.TrimSpace(expectedMD5Sum) != "" && strings.TrimSpace(actualMD5Sum) != "" {
    if expectedMD5Sum != "" && actualMD5Sum != "" {
        // Decode md5sum to bytes from their hexadecimal
        // representations.
        expectedMD5SumBytes, err := hex.DecodeString(expectedMD5Sum)
@@ -199,7 +199,7 @@ func isMD5SumEqual(expectedMD5Sum, actualMD5Sum string) bool {
}

// CreateObject - create an object.
func (fs Filesystem) CreateObject(bucket, object, expectedMD5Sum string, size int64, data io.Reader, signature *signV4.Signature) (ObjectMetadata, *probe.Error) {
func (fs Filesystem) CreateObject(bucket, object, expectedMD5Sum string, size int64, data io.Reader, sig *signV4.Signature) (ObjectMetadata, *probe.Error) {
    di, e := disk.GetInfo(fs.path)
    if e != nil {
        return ObjectMetadata{}, probe.NewError(e)
@@ -233,9 +233,9 @@ func (fs Filesystem) CreateObject(bucket, object, expectedMD5Sum string, size in

    // Get object path.
    objectPath := filepath.Join(bucketPath, object)
    if strings.TrimSpace(expectedMD5Sum) != "" {
    if expectedMD5Sum != "" {
        var expectedMD5SumBytes []byte
        expectedMD5SumBytes, e = base64.StdEncoding.DecodeString(strings.TrimSpace(expectedMD5Sum))
        expectedMD5SumBytes, e = base64.StdEncoding.DecodeString(expectedMD5Sum)
        if e != nil {
            // Pro-actively close the connection.
            return ObjectMetadata{}, probe.NewError(InvalidDigest{MD5: expectedMD5Sum})
@@ -244,7 +244,7 @@ func (fs Filesystem) CreateObject(bucket, object, expectedMD5Sum string, size in
    }

    // Write object.
    file, e := atomic.FileCreateWithPrefix(objectPath, "$tmpobject")
    file, e := atomic.FileCreateWithPrefix(objectPath, expectedMD5Sum+"$tmpobject")
    if e != nil {
        switch e := e.(type) {
        case *os.PathError:
@@ -279,22 +279,22 @@ func (fs Filesystem) CreateObject(bucket, object, expectedMD5Sum string, size in
    md5Sum := hex.EncodeToString(md5Hasher.Sum(nil))
    // Verify if the written object is equal to what is expected, only
    // if it is requested as such.
    if strings.TrimSpace(expectedMD5Sum) != "" {
        if !isMD5SumEqual(strings.TrimSpace(expectedMD5Sum), md5Sum) {
    if expectedMD5Sum != "" {
        if !isMD5SumEqual(expectedMD5Sum, md5Sum) {
            file.CloseAndPurge()
            return ObjectMetadata{}, probe.NewError(BadDigest{MD5: expectedMD5Sum, Bucket: bucket, Object: object})
        }
    }
    sha256Sum := hex.EncodeToString(sha256Hasher.Sum(nil))
    if signature != nil {
        ok, err := signature.DoesSignatureMatch(sha256Sum)
    if sig != nil {
        ok, err := sig.DoesSignatureMatch(sha256Sum)
        if err != nil {
            file.CloseAndPurge()
            return ObjectMetadata{}, err.Trace()
        }
        if !ok {
            file.CloseAndPurge()
            return ObjectMetadata{}, probe.NewError(signV4.SigDoesNotMatch{})
            return ObjectMetadata{}, signV4.ErrSignDoesNotMath("Signature does not match")
        }
    }
    file.Close()
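One encoding detail worth spelling out from the hunks above: expectedMD5Sum comes from the Content-MD5 header and is base64, while the digest computed from the streamed data is hex, so the two are only comparable after decoding. A small standalone illustration (the comparison here is a stand-in, not the fs package's isMD5SumEqual):

```go
package main

import (
    "crypto/md5"
    "encoding/base64"
    "encoding/hex"
    "fmt"
)

func main() {
    payload := []byte("hello, world")
    sum := md5.Sum(payload)

    // What a client sends in the Content-MD5 header.
    contentMD5 := base64.StdEncoding.EncodeToString(sum[:])
    // What the server computes while streaming the body to disk.
    computedHex := hex.EncodeToString(sum[:])

    // Compare the raw bytes, not the encodings.
    decoded, _ := base64.StdEncoding.DecodeString(contentMD5)
    fmt.Println(contentMD5, computedHex, hex.EncodeToString(decoded) == computedHex) // true
}
```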
@@ -16,33 +16,41 @@

package signature

// MissingDateHeader date header missing
type MissingDateHeader struct{}
import (
    "fmt"

func (e MissingDateHeader) Error() string {
    return "Missing date header"
    "github.com/minio/minio/pkg/probe"
)

type errFunc func(msg string, a ...string) *probe.Error

func errFactory() errFunc {
    return func(msg string, a ...string) *probe.Error {
        return probe.NewError(fmt.Errorf("%s, Args: %s", msg, a)).Untrace()
    }
}

// MissingExpiresQuery expires query string missing
type MissingExpiresQuery struct{}

func (e MissingExpiresQuery) Error() string {
    return "Missing expires query string"
}

// ExpiredPresignedRequest request already expired
type ExpiredPresignedRequest struct{}

func (e ExpiredPresignedRequest) Error() string {
    return "Presigned request already expired"
}

// SigDoesNotMatch invalid signature
type SigDoesNotMatch struct {
    SignatureSent string
    SignatureCalculated string
}

func (e SigDoesNotMatch) Error() string {
    return "The request signature we calculated does not match the signature you provided"
}
// Various errors.
var (
    ErrPolicyAlreadyExpired = errFactory()
    ErrInvalidRegion = errFactory()
    ErrInvalidDateFormat = errFactory()
    ErrInvalidService = errFactory()
    ErrInvalidRequestVersion = errFactory()
    ErrMissingFields = errFactory()
    ErrMissingCredTag = errFactory()
    ErrCredMalformed = errFactory()
    ErrMissingSignTag = errFactory()
    ErrMissingSignHeadersTag = errFactory()
    ErrMissingDateHeader = errFactory()
    ErrMalformedDate = errFactory()
    ErrMalformedExpires = errFactory()
    ErrAuthHeaderEmpty = errFactory()
    ErrUnsuppSignAlgo = errFactory()
    ErrMissingExpiresQuery = errFactory()
    ErrExpiredPresignRequest = errFactory()
    ErrSignDoesNotMath = errFactory()
    ErrInvalidAccessKeyID = errFactory()
    ErrInvalidSecretKey = errFactory()
    ErrRegionISEmpty = errFactory()
)
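The errFactory closures above replace the one-struct-per-error pattern this commit deletes; each exported Err... value is invoked like a function with a message plus the offending values, and the real implementation wraps the result in *probe.Error with Untrace. A simplified, self-contained sketch of the call pattern (plain error instead of probe.Error):

```go
package main

import "fmt"

// Stand-in for errFunc/errFactory: every sentinel is a closure that stamps the
// message and arguments into a fresh error value at the call site.
type errFunc func(msg string, args ...string) error

func errFactory() errFunc {
    return func(msg string, args ...string) error {
        return fmt.Errorf("%s, Args: %s", msg, args)
    }
}

var (
    ErrInvalidRegion      = errFactory()
    ErrInvalidAccessKeyID = errFactory()
)

func main() {
    err := ErrInvalidRegion("Requested region is not recognized.", "us-west-3")
    fmt.Println(err) // Requested region is not recognized., Args: [us-west-3]
}
```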
@@ -17,9 +17,11 @@
package signature

import (
    "encoding/base64"
    "encoding/json"
    "fmt"
    "reflect"
    "strings"
    "time"

    "github.com/minio/minio/pkg/probe"
@@ -67,8 +69,8 @@ type PostPolicyForm struct {
    }
}

// ParsePostPolicyForm - Parse JSON policy string into typed POostPolicyForm structure.
func ParsePostPolicyForm(policy string) (PostPolicyForm, *probe.Error) {
// parsePostPolicyFormV4 - Parse JSON policy string into typed POostPolicyForm structure.
func parsePostPolicyFormV4(policy string) (PostPolicyForm, *probe.Error) {
    // Convert po into interfaces and
    // perform strict type conversion using reflection.
    var rawPolicy struct {
@@ -155,3 +157,53 @@ func ParsePostPolicyForm(policy string) (PostPolicyForm, *probe.Error) {
    }
    return parsedPolicy, nil
}

// ApplyPolicyCond - apply policy conditions and validate input values.
func ApplyPolicyCond(formValues map[string]string) *probe.Error {
    if formValues["X-Amz-Algorithm"] != signV4Algorithm {
        return ErrUnsuppSignAlgo("Unsupported signature algorithm in policy form data.", formValues["X-Amz-Algorithm"]).Trace(formValues["X-Amz-Algorithm"])
    }
    /// Decoding policy
    policyBytes, e := base64.StdEncoding.DecodeString(formValues["Policy"])
    if e != nil {
        return probe.NewError(e)
    }
    postPolicyForm, err := parsePostPolicyFormV4(string(policyBytes))
    if err != nil {
        return err.Trace()
    }
    if !postPolicyForm.Expiration.After(time.Now().UTC()) {
        return ErrPolicyAlreadyExpired("Policy has already expired, please generate a new one.")
    }
    if postPolicyForm.Conditions.Policies["$bucket"].Operator == "eq" {
        if formValues["Bucket"] != postPolicyForm.Conditions.Policies["$bucket"].Value {
            return ErrMissingFields("Policy bucket is missing.", formValues["Bucket"])
        }
    }
    if postPolicyForm.Conditions.Policies["$x-amz-date"].Operator == "eq" {
        if formValues["X-Amz-Date"] != postPolicyForm.Conditions.Policies["$x-amz-date"].Value {
            return ErrMissingFields("Policy date is missing.", formValues["X-Amz-Date"])
        }
    }
    if postPolicyForm.Conditions.Policies["$Content-Type"].Operator == "starts-with" {
        if !strings.HasPrefix(formValues["Content-Type"], postPolicyForm.Conditions.Policies["$Content-Type"].Value) {
            return ErrMissingFields("Policy content-type is missing or invalid.", formValues["Content-Type"])
        }
    }
    if postPolicyForm.Conditions.Policies["$Content-Type"].Operator == "eq" {
        if formValues["Content-Type"] != postPolicyForm.Conditions.Policies["$Content-Type"].Value {
            return ErrMissingFields("Policy content-Type is missing or invalid.", formValues["Content-Type"])
        }
    }
    if postPolicyForm.Conditions.Policies["$key"].Operator == "starts-with" {
        if !strings.HasPrefix(formValues["Key"], postPolicyForm.Conditions.Policies["$key"].Value) {
            return ErrMissingFields("Policy key is missing.", formValues["Key"])
        }
    }
    if postPolicyForm.Conditions.Policies["$key"].Operator == "eq" {
        if formValues["Key"] != postPolicyForm.Conditions.Policies["$key"].Value {
            return ErrMissingFields("Policy key is missing.", formValues["Key"])
        }
    }
    return nil
}
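ApplyPolicyCond receives the browser POST form fields as a flat map and checks them against the decoded policy document. Roughly the following shape is what a policy-based upload supplies; every value below is an illustrative placeholder, and the import path/alias is assumed from the rest of this diff.

```go
package main

import (
    "fmt"

    signV4 "github.com/minio/minio/pkg/signature"
)

func main() {
    // Illustrative form values only; "Policy" carries base64-encoded JSON and the
    // signature fields follow the X-Amz-* naming used throughout this package.
    formValues := map[string]string{
        "X-Amz-Algorithm":  "AWS4-HMAC-SHA256",
        "X-Amz-Credential": "AKIAIOSFODNN7EXAMPLE/20150101/us-east-1/s3/aws4_request",
        "X-Amz-Date":       "20150101T000000Z",
        "X-Amz-Signature":  "<hex signature>",
        "Policy":           "<base64-encoded policy JSON>",
        "Bucket":           "testbucket",
        "Key":              "uploads/photo.jpg",
        "Content-Type":     "image/jpeg",
    }
    if err := signV4.ApplyPolicyCond(formValues); err != nil {
        fmt.Println("policy conditions not satisfied:", err)
    }
}
```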
@@ -18,16 +18,13 @@ package signature

import (
    "bytes"
    "crypto/hmac"
    "encoding/hex"
    "net/http"
    "net/url"
    "regexp"
    "sort"
    "strconv"
    "strings"
    "time"
    "unicode/utf8"

    "github.com/minio/minio/pkg/crypto/sha256"
    "github.com/minio/minio/pkg/probe"
@@ -35,72 +32,52 @@ import (

// Signature - local variables
type Signature struct {
    AccessKeyID string
    SecretAccessKey string
    Region string
    Presigned bool
    PresignedPolicy string
    SignedHeaders []string
    Signature string
    Request *http.Request
    accessKeyID string
    secretAccessKey string
    region string
    httpRequest *http.Request
    extractedSignedHeaders http.Header
}

const (
    authHeaderPrefix = "AWS4-HMAC-SHA256"
    iso8601Format = "20060102T150405Z"
    yyyymmdd = "20060102"
    signV4Algorithm = "AWS4-HMAC-SHA256"
    iso8601Format = "20060102T150405Z"
    yyyymmdd = "20060102"
)

// sumHMAC calculate hmac between two input byte array
func sumHMAC(key []byte, data []byte) []byte {
    hash := hmac.New(sha256.New, key)
    hash.Write(data)
    return hash.Sum(nil)
// New - initialize a new authorization checkes.
func New(accessKeyID, secretAccessKey, region string) (*Signature, *probe.Error) {
    if !isValidAccessKey.MatchString(accessKeyID) {
        return nil, ErrInvalidAccessKeyID("Invalid access key id.", accessKeyID).Trace(accessKeyID)
    }
    if !isValidSecretKey.MatchString(secretAccessKey) {
        return nil, ErrInvalidAccessKeyID("Invalid secret key.", secretAccessKey).Trace(secretAccessKey)
    }
    if region == "" {
        return nil, ErrRegionISEmpty("Region is empty.").Trace()
    }
    signature := &Signature{
        accessKeyID: accessKeyID,
        secretAccessKey: secretAccessKey,
        region: region,
    }
    return signature, nil
}

// getURLEncodedName encode the strings from UTF-8 byte representations to HTML hex escape sequences
//
// This is necessary since regular url.Parse() and url.Encode() functions do not support UTF-8
// non english characters cannot be parsed due to the nature in which url.Encode() is written
//
// This function on the other hand is a direct replacement for url.Encode() technique to support
// pretty much every UTF-8 character.
func getURLEncodedName(name string) string {
    // if object matches reserved string, no need to encode them
    reservedNames := regexp.MustCompile("^[a-zA-Z0-9-_.~/]+$")
    if reservedNames.MatchString(name) {
        return name
// SetHTTPRequestToVerify - sets the http request which needs to be verified.
func (s *Signature) SetHTTPRequestToVerify(r *http.Request) *Signature {
    // Do not set http request if its 'nil'.
    if r == nil {
        return s
    }
    var encodedName string
    for _, s := range name {
        if 'A' <= s && s <= 'Z' || 'a' <= s && s <= 'z' || '0' <= s && s <= '9' { // §2.3 Unreserved characters (mark)
            encodedName = encodedName + string(s)
            continue
        }
        switch s {
        case '-', '_', '.', '~', '/': // §2.3 Unreserved characters (mark)
            encodedName = encodedName + string(s)
            continue
        default:
            len := utf8.RuneLen(s)
            if len < 0 {
                return name
            }
            u := make([]byte, len)
            utf8.EncodeRune(u, s)
            for _, r := range u {
                hex := hex.EncodeToString([]byte{r})
                encodedName = encodedName + "%" + strings.ToUpper(hex)
            }
        }
    }
    return encodedName
    s.httpRequest = r
    return s
}

// getCanonicalHeaders generate a list of request headers with their values
func (r Signature) getCanonicalHeaders(signedHeaders map[string][]string) string {
func (s Signature) getCanonicalHeaders(signedHeaders http.Header) string {
    var headers []string
    vals := make(map[string][]string)
    vals := make(http.Header)
    for k, vv := range signedHeaders {
        headers = append(headers, strings.ToLower(k))
        vals[strings.ToLower(k)] = vv
@@ -114,7 +91,7 @@ func (r Signature) getCanonicalHeaders(signedHeaders map[string][]string) string
        buf.WriteByte(':')
        switch {
        case k == "host":
            buf.WriteString(r.Request.Host)
            buf.WriteString(s.httpRequest.Host)
            fallthrough
        default:
            for idx, v := range vals[k] {
@@ -130,7 +107,7 @@ func (r Signature) getCanonicalHeaders(signedHeaders map[string][]string) string
}

// getSignedHeaders generate a string i.e alphabetically sorted, semicolon-separated list of lowercase request header names
func (r Signature) getSignedHeaders(signedHeaders map[string][]string) string {
func (s Signature) getSignedHeaders(signedHeaders http.Header) string {
    var headers []string
    for k := range signedHeaders {
        headers = append(headers, strings.ToLower(k))
@@ -140,41 +117,6 @@ func (r Signature) getSignedHeaders(signedHeaders map[string][]string) string {
    return strings.Join(headers, ";")
}

// extractSignedHeaders extract signed headers from Authorization header
func (r Signature) extractSignedHeaders() map[string][]string {
    extractedSignedHeadersMap := make(map[string][]string)
    for _, header := range r.SignedHeaders {
        val, ok := r.Request.Header[http.CanonicalHeaderKey(header)]
        if !ok {
            // Golang http server strips off 'Expect' header, if the
            // client sent this as part of signed headers we need to
            // handle otherwise we would see a signature mismatch.
            // `aws-cli` sets this as part of signed headers which is
            // a bad idea since servers trying to implement AWS
            // Signature version '4' will all encounter this issue.
            //
            // According to
            // http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.20
            // Expect header is always of form:
            //
            // Expect = "Expect" ":" 1#expectation
            // expectation = "100-continue" | expectation-extension
            //
            // So it safe to assume that '100-continue' is what would
            // be sent, for the time being keep this work around.
            // Adding a *TODO* to remove this later when Golang server
            // doesn't filter out the 'Expect' header.
            if header == "expect" {
                extractedSignedHeadersMap[header] = []string{"100-continue"}
            }
            // if not found continue, we will fail later
            continue
        }
        extractedSignedHeadersMap[header] = val
    }
    return extractedSignedHeadersMap
}

// getCanonicalRequest generate a canonical request of style
//
// canonicalRequest =
@@ -185,18 +127,18 @@ func (r Signature) extractSignedHeaders() map[string][]string {
// <SignedHeaders>\n
// <HashedPayload>
//
func (r *Signature) getCanonicalRequest() string {
    payload := r.Request.Header.Get(http.CanonicalHeaderKey("x-amz-content-sha256"))
    r.Request.URL.RawQuery = strings.Replace(r.Request.URL.Query().Encode(), "+", "%20", -1)
    encodedPath := getURLEncodedName(r.Request.URL.Path)
    // convert any space strings back to "+"
func (s *Signature) getCanonicalRequest() string {
    payload := s.httpRequest.Header.Get(http.CanonicalHeaderKey("x-amz-content-sha256"))
    s.httpRequest.URL.RawQuery = strings.Replace(s.httpRequest.URL.Query().Encode(), "+", "%20", -1)
    encodedPath := getURLEncodedName(s.httpRequest.URL.Path)
    // Convert any space strings back to "+".
    encodedPath = strings.Replace(encodedPath, "+", "%20", -1)
    canonicalRequest := strings.Join([]string{
        r.Request.Method,
        s.httpRequest.Method,
        encodedPath,
        r.Request.URL.RawQuery,
        r.getCanonicalHeaders(r.extractSignedHeaders()),
        r.getSignedHeaders(r.extractSignedHeaders()),
        s.httpRequest.URL.RawQuery,
        s.getCanonicalHeaders(s.extractedSignedHeaders),
        s.getSignedHeaders(s.extractedSignedHeaders),
        payload,
    }, "\n")
    return canonicalRequest
@@ -212,69 +154,89 @@ func (r *Signature) getCanonicalRequest() string {
// <SignedHeaders>\n
// <HashedPayload>
//
func (r Signature) getPresignedCanonicalRequest(presignedQuery string) string {
func (s Signature) getPresignedCanonicalRequest(presignedQuery string) string {
    rawQuery := strings.Replace(presignedQuery, "+", "%20", -1)
    encodedPath := getURLEncodedName(r.Request.URL.Path)
    // convert any space strings back to "+"
    encodedPath := getURLEncodedName(s.httpRequest.URL.Path)
    // Convert any space strings back to "+".
    encodedPath = strings.Replace(encodedPath, "+", "%20", -1)
    canonicalRequest := strings.Join([]string{
        r.Request.Method,
        s.httpRequest.Method,
        encodedPath,
        rawQuery,
        r.getCanonicalHeaders(r.extractSignedHeaders()),
        r.getSignedHeaders(r.extractSignedHeaders()),
        s.getCanonicalHeaders(s.extractedSignedHeaders),
        s.getSignedHeaders(s.extractedSignedHeaders),
        "UNSIGNED-PAYLOAD",
    }, "\n")
    return canonicalRequest
}

// getScope generate a string of a specific date, an AWS region, and a service
func (r Signature) getScope(t time.Time) string {
// getScope generate a string of a specific date, an AWS region, and a service.
func (s Signature) getScope(t time.Time) string {
    scope := strings.Join([]string{
        t.Format(yyyymmdd),
        r.Region,
        s.region,
        "s3",
        "aws4_request",
    }, "/")
    return scope
}

// getStringToSign a string based on selected query values
func (r Signature) getStringToSign(canonicalRequest string, t time.Time) string {
    stringToSign := authHeaderPrefix + "\n" + t.Format(iso8601Format) + "\n"
    stringToSign = stringToSign + r.getScope(t) + "\n"
// getStringToSign a string based on selected query values.
func (s Signature) getStringToSign(canonicalRequest string, t time.Time) string {
    stringToSign := signV4Algorithm + "\n" + t.Format(iso8601Format) + "\n"
    stringToSign = stringToSign + s.getScope(t) + "\n"
    canonicalRequestBytes := sha256.Sum256([]byte(canonicalRequest))
    stringToSign = stringToSign + hex.EncodeToString(canonicalRequestBytes[:])
    return stringToSign
}

// getSigningKey hmac seed to calculate final signature
func (r Signature) getSigningKey(t time.Time) []byte {
    secret := r.SecretAccessKey
// getSigningKey hmac seed to calculate final signature.
func (s Signature) getSigningKey(t time.Time) []byte {
    secret := s.secretAccessKey
    date := sumHMAC([]byte("AWS4"+secret), []byte(t.Format(yyyymmdd)))
    region := sumHMAC(date, []byte(r.Region))
    region := sumHMAC(date, []byte(s.region))
    service := sumHMAC(region, []byte("s3"))
    signingKey := sumHMAC(service, []byte("aws4_request"))
    return signingKey
}

// getSignature final signature in hexadecimal form
func (r Signature) getSignature(signingKey []byte, stringToSign string) string {
// getSignature final signature in hexadecimal form.
func (s Signature) getSignature(signingKey []byte, stringToSign string) string {
    return hex.EncodeToString(sumHMAC(signingKey, []byte(stringToSign)))
}
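Taken together, getScope, getStringToSign, getSigningKey and getSignature above implement the standard AWS Signature V4 derivation chain. A compact, self-contained restatement using only the standard library (example inputs are arbitrary; the real code routes sha256 through pkg/crypto/sha256):

```go
package main

import (
    "crypto/hmac"
    "crypto/sha256"
    "encoding/hex"
    "fmt"
    "time"
)

func hmacSHA256(key, data []byte) []byte {
    h := hmac.New(sha256.New, key)
    h.Write(data)
    return h.Sum(nil)
}

func main() {
    secret, region := "example-secret-key", "us-east-1"
    t, _ := time.Parse("20060102T150405Z", "20150101T000000Z")
    canonicalRequest := "GET\n/\n\nhost:localhost\n\nhost\nUNSIGNED-PAYLOAD"

    // Scope and string-to-sign, exactly as getScope/getStringToSign assemble them.
    scope := t.Format("20060102") + "/" + region + "/s3/aws4_request"
    crSum := sha256.Sum256([]byte(canonicalRequest))
    stringToSign := "AWS4-HMAC-SHA256\n" + t.Format("20060102T150405Z") + "\n" + scope + "\n" + hex.EncodeToString(crSum[:])

    // Key derivation chain from getSigningKey, then the final hex signature.
    dateKey := hmacSHA256([]byte("AWS4"+secret), []byte(t.Format("20060102")))
    regionKey := hmacSHA256(dateKey, []byte(region))
    serviceKey := hmacSHA256(regionKey, []byte("s3"))
    signingKey := hmacSHA256(serviceKey, []byte("aws4_request"))
    fmt.Println(hex.EncodeToString(hmacSHA256(signingKey, []byte(stringToSign))))
}
```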
// DoesPolicySignatureMatch - Verify query headers with post policy
// - http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-HTTPPOSTConstructPolicy.html
// returns true if matches, false otherwise. if error is not nil then it is always false
func (r *Signature) DoesPolicySignatureMatch(date string) (bool, *probe.Error) {
    t, err := time.Parse(iso8601Format, date)
func (s *Signature) DoesPolicySignatureMatch(formValues map[string]string) (bool, *probe.Error) {
    // Parse credential tag.
    creds, err := parseCredential(formValues["X-Amz-Credential"])
    if err != nil {
        return false, probe.NewError(err)
        return false, err.Trace(formValues["X-Amz-Credential"])
    }
    signingKey := r.getSigningKey(t)
    stringToSign := string(r.PresignedPolicy)
    newSignature := r.getSignature(signingKey, stringToSign)
    if newSignature != r.Signature {

    // Verify if the access key id matches.
    if creds.accessKeyID != s.accessKeyID {
        return false, ErrInvalidAccessKeyID("Access key id does not match with our records.", creds.accessKeyID).Trace(creds.accessKeyID)
    }

    // Verify if the region is valid.
    reqRegion := creds.scope.region
    if !isValidRegion(reqRegion, s.region) {
        return false, ErrInvalidRegion("Requested region is not recognized.", reqRegion).Trace(reqRegion)
    }

    // Save region.
    s.region = reqRegion

    // Parse date string.
    t, e := time.Parse(iso8601Format, formValues["X-Amz-Date"])
    if e != nil {
        return false, probe.NewError(e)
    }
    signingKey := s.getSigningKey(t)
    newSignature := s.getSignature(signingKey, formValues["Policy"])
    if newSignature != formValues["X-Amz-Signature"] {
        return false, nil
    }
    return true, nil
@@ -283,35 +245,49 @@ func (r *Signature) DoesPolicySignatureMatch(date string) (bool, *probe.Error) {
// DoesPresignedSignatureMatch - Verify query headers with presigned signature
// - http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
// returns true if matches, false otherwise. if error is not nil then it is always false
func (r *Signature) DoesPresignedSignatureMatch() (bool, *probe.Error) {
    query := make(url.Values)
    query.Set("X-Amz-Algorithm", authHeaderPrefix)
func (s *Signature) DoesPresignedSignatureMatch() (bool, *probe.Error) {
    // Parse request query string.
    preSignV4Values, err := parsePreSignV4(s.httpRequest.URL.Query())
    if err != nil {
        return false, err.Trace(s.httpRequest.URL.String())
    }

    var date string
    if date = r.Request.URL.Query().Get("X-Amz-Date"); date == "" {
        return false, probe.NewError(MissingDateHeader{})
    // Verify if the access key id matches.
    if preSignV4Values.Creds.accessKeyID != s.accessKeyID {
        return false, ErrInvalidAccessKeyID("Access key id does not match with our records.", preSignV4Values.Creds.accessKeyID).Trace(preSignV4Values.Creds.accessKeyID)
    }
    t, err := time.Parse(iso8601Format, date)
    if err != nil {
        return false, probe.NewError(err)

    // Verify if region is valid.
    reqRegion := preSignV4Values.Creds.scope.region
    if !isValidRegion(reqRegion, s.region) {
        return false, ErrInvalidRegion("Requested region is not recognized.", reqRegion).Trace(reqRegion)
    }
    if _, ok := r.Request.URL.Query()["X-Amz-Expires"]; !ok {
        return false, probe.NewError(MissingExpiresQuery{})
    }
    expireSeconds, err := strconv.Atoi(r.Request.URL.Query().Get("X-Amz-Expires"))
    if err != nil {
        return false, probe.NewError(err)
    }
    if time.Now().UTC().Sub(t) > time.Duration(expireSeconds)*time.Second {
        return false, probe.NewError(ExpiredPresignedRequest{})

    // Save region.
    s.region = reqRegion

    // Extract all the signed headers along with its values.
    s.extractedSignedHeaders = extractSignedHeaders(preSignV4Values.SignedHeaders, s.httpRequest.Header)

    // Construct new query.
    query := make(url.Values)
    query.Set("X-Amz-Algorithm", signV4Algorithm)

    if time.Now().UTC().Sub(preSignV4Values.Date) > time.Duration(preSignV4Values.Expires)*time.Second {
        return false, ErrExpiredPresignRequest("Presigned request already expired, please initiate a new request.")
    }

    // Save the date and expires.
    t := preSignV4Values.Date
    expireSeconds := int(preSignV4Values.Expires)

    query.Set("X-Amz-Date", t.Format(iso8601Format))
    query.Set("X-Amz-Expires", strconv.Itoa(expireSeconds))
    query.Set("X-Amz-SignedHeaders", r.getSignedHeaders(r.extractSignedHeaders()))
    query.Set("X-Amz-Credential", r.AccessKeyID+"/"+r.getScope(t))
    query.Set("X-Amz-SignedHeaders", s.getSignedHeaders(s.extractedSignedHeaders))
    query.Set("X-Amz-Credential", s.accessKeyID+"/"+s.getScope(t))

    // Save other headers available in the request parameters.
    for k, v := range r.Request.URL.Query() {
    for k, v := range s.httpRequest.URL.Query() {
        if strings.HasPrefix(strings.ToLower(k), "x-amz") {
            continue
        }
@@ -320,24 +296,24 @@ func (r *Signature) DoesPresignedSignatureMatch() (bool, *probe.Error) {
    encodedQuery := query.Encode()

    // Verify if date query is same.
    if r.Request.URL.Query().Get("X-Amz-Date") != query.Get("X-Amz-Date") {
    if s.httpRequest.URL.Query().Get("X-Amz-Date") != query.Get("X-Amz-Date") {
        return false, nil
    }
    // Verify if expires query is same.
    if r.Request.URL.Query().Get("X-Amz-Expires") != query.Get("X-Amz-Expires") {
    if s.httpRequest.URL.Query().Get("X-Amz-Expires") != query.Get("X-Amz-Expires") {
        return false, nil
    }
    // Verify if signed headers query is same.
    if r.Request.URL.Query().Get("X-Amz-SignedHeaders") != query.Get("X-Amz-SignedHeaders") {
    if s.httpRequest.URL.Query().Get("X-Amz-SignedHeaders") != query.Get("X-Amz-SignedHeaders") {
        return false, nil
    }
    // Verify if credential query is same.
    if r.Request.URL.Query().Get("X-Amz-Credential") != query.Get("X-Amz-Credential") {
    if s.httpRequest.URL.Query().Get("X-Amz-Credential") != query.Get("X-Amz-Credential") {
        return false, nil
    }
    // Verify finally if signature is same.
    newSignature := r.getSignature(r.getSigningKey(t), r.getStringToSign(r.getPresignedCanonicalRequest(encodedQuery), t))
    if r.Request.URL.Query().Get("X-Amz-Signature") != newSignature {
    newSignature := s.getSignature(s.getSigningKey(t), s.getStringToSign(s.getPresignedCanonicalRequest(encodedQuery), t))
    if s.httpRequest.URL.Query().Get("X-Amz-Signature") != newSignature {
        return false, nil
    }
    return true, nil
@@ -346,27 +322,57 @@ func (r *Signature) DoesPresignedSignatureMatch() (bool, *probe.Error) {
// DoesSignatureMatch - Verify authorization header with calculated header in accordance with
// - http://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-authenticating-requests.html
// returns true if matches, false otherwise. if error is not nil then it is always false
func (r *Signature) DoesSignatureMatch(hashedPayload string) (bool, *probe.Error) {
    // set new calculated payload
    r.Request.Header.Set("X-Amz-Content-Sha256", hashedPayload)
func (s *Signature) DoesSignatureMatch(hashedPayload string) (bool, *probe.Error) {
    // Save authorization header.
    v4Auth := s.httpRequest.Header.Get("Authorization")

    // Add date if not present throw error
    // Parse signature version '4' header.
    signV4Values, err := parseSignV4(v4Auth)
    if err != nil {
        return false, err.Trace(v4Auth)
    }

    // Extract all the signed headers along with its values.
    s.extractedSignedHeaders = extractSignedHeaders(signV4Values.SignedHeaders, s.httpRequest.Header)

    // Verify if the access key id matches.
    if signV4Values.Creds.accessKeyID != s.accessKeyID {
        return false, ErrInvalidAccessKeyID("Access key id does not match with our records.", signV4Values.Creds.accessKeyID).Trace(signV4Values.Creds.accessKeyID)
    }

    // Verify if region is valid.
    reqRegion := signV4Values.Creds.scope.region
    if !isValidRegion(reqRegion, s.region) {
        return false, ErrInvalidRegion("Requested region is not recognized.", reqRegion).Trace(reqRegion)
    }

    // Save region.
    s.region = reqRegion

    // Set input payload.
    s.httpRequest.Header.Set("X-Amz-Content-Sha256", hashedPayload)

    // Extract date, if not present throw error.
    var date string
    if date = r.Request.Header.Get(http.CanonicalHeaderKey("x-amz-date")); date == "" {
        if date = r.Request.Header.Get("Date"); date == "" {
            return false, probe.NewError(MissingDateHeader{})
    if date = s.httpRequest.Header.Get(http.CanonicalHeaderKey("x-amz-date")); date == "" {
        if date = s.httpRequest.Header.Get("Date"); date == "" {
            return false, ErrMissingDateHeader("Date header is missing from the request.").Trace()
        }
    }
    t, err := time.Parse(iso8601Format, date)
    if err != nil {
        return false, probe.NewError(err)
    // Parse date header.
    t, e := time.Parse(iso8601Format, date)
    if e != nil {
        return false, probe.NewError(e)
    }
    canonicalRequest := r.getCanonicalRequest()
    stringToSign := r.getStringToSign(canonicalRequest, t)
    signingKey := r.getSigningKey(t)
    newSignature := r.getSignature(signingKey, stringToSign)

    if newSignature != r.Signature {
    // Signature version '4'.
    canonicalRequest := s.getCanonicalRequest()
    stringToSign := s.getStringToSign(canonicalRequest, t)
    signingKey := s.getSigningKey(t)
    newSignature := s.getSignature(signingKey, stringToSign)

    // Verify if signature match.
    if newSignature != signV4Values.Signature {
        return false, nil
    }
    return true, nil
pkg/signature/utils.go (new file, 118 lines)
@@ -0,0 +1,118 @@
package signature

import (
    "crypto/hmac"
    "encoding/hex"
    "net/http"
    "regexp"
    "strings"
    "unicode/utf8"

    "github.com/minio/minio/pkg/crypto/sha256"
)

// AccessID and SecretID length in bytes
const (
    MinioAccessID = 20
    MinioSecretID = 40
)

/// helpers

// isValidSecretKey - validate secret key.
var isValidSecretKey = regexp.MustCompile("^.{40}$")

// isValidAccessKey - validate access key.
var isValidAccessKey = regexp.MustCompile("^[A-Z0-9\\-\\.\\_\\~]{20}$")

// isValidRegion - verify if incoming region value is valid with configured Region.
func isValidRegion(reqRegion string, confRegion string) bool {
    if confRegion == "" || confRegion == "US" {
        confRegion = "us-east-1"
    }
    // Some older s3 clients set region as "US" instead of
    // "us-east-1", handle it.
    if reqRegion == "US" {
        reqRegion = "us-east-1"
    }
    return reqRegion == confRegion
}

// sumHMAC calculate hmac between two input byte array.
func sumHMAC(key []byte, data []byte) []byte {
    hash := hmac.New(sha256.New, key)
    hash.Write(data)
    return hash.Sum(nil)
}

// getURLEncodedName encode the strings from UTF-8 byte representations to HTML hex escape sequences
//
// This is necessary since regular url.Parse() and url.Encode() functions do not support UTF-8
// non english characters cannot be parsed due to the nature in which url.Encode() is written
//
// This function on the other hand is a direct replacement for url.Encode() technique to support
// pretty much every UTF-8 character.
func getURLEncodedName(name string) string {
    // if object matches reserved string, no need to encode them
    reservedNames := regexp.MustCompile("^[a-zA-Z0-9-_.~/]+$")
    if reservedNames.MatchString(name) {
        return name
    }
    var encodedName string
    for _, s := range name {
        if 'A' <= s && s <= 'Z' || 'a' <= s && s <= 'z' || '0' <= s && s <= '9' { // §2.3 Unreserved characters (mark)
            encodedName = encodedName + string(s)
            continue
        }
        switch s {
        case '-', '_', '.', '~', '/': // §2.3 Unreserved characters (mark)
            encodedName = encodedName + string(s)
            continue
        default:
            len := utf8.RuneLen(s)
            if len < 0 {
                return name
            }
            u := make([]byte, len)
            utf8.EncodeRune(u, s)
            for _, r := range u {
                hex := hex.EncodeToString([]byte{r})
                encodedName = encodedName + "%" + strings.ToUpper(hex)
            }
        }
    }
    return encodedName
}

// extractSignedHeaders extract signed headers from Authorization header
func extractSignedHeaders(signedHeaders []string, reqHeaders http.Header) http.Header {
    extractedSignedHeaders := make(http.Header)
    for _, header := range signedHeaders {
        val, ok := reqHeaders[http.CanonicalHeaderKey(header)]
        if !ok {
            // Golang http server strips off 'Expect' header, if the
            // client sent this as part of signed headers we need to
            // handle otherwise we would see a signature mismatch.
            // `aws-cli` sets this as part of signed headers.
            //
            // According to
            // http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.20
            // Expect header is always of form:
            //
            // Expect = "Expect" ":" 1#expectation
            // expectation = "100-continue" | expectation-extension
            //
            // So it safe to assume that '100-continue' is what would
            // be sent, for the time being keep this work around.
            // Adding a *TODO* to remove this later when Golang server
            // doesn't filter out the 'Expect' header.
            if header == "expect" {
                extractedSignedHeaders[header] = []string{"100-continue"}
            }
            // If not found continue, we will fail later.
            continue
        }
        extractedSignedHeaders[header] = val
    }
    return extractedSignedHeaders
}
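The region check above folds the legacy "US" value and an unset configured region into us-east-1 before comparing. A standalone copy for illustration:

```go
package main

import "fmt"

// Copy of isValidRegion for demonstration; both sides normalize ""/"US" to us-east-1.
func isValidRegion(reqRegion, confRegion string) bool {
    if confRegion == "" || confRegion == "US" {
        confRegion = "us-east-1"
    }
    if reqRegion == "US" {
        reqRegion = "us-east-1"
    }
    return reqRegion == confRegion
}

func main() {
    fmt.Println(isValidRegion("US", ""))                     // true
    fmt.Println(isValidRegion("us-east-1", "US"))            // true
    fmt.Println(isValidRegion("eu-central-1", "us-east-1"))  // false
}
```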
pkg/signature/v4-parser.go (new file, 203 lines)
@@ -0,0 +1,203 @@
package signature

import (
    "net/url"
    "strings"
    "time"

    "github.com/minio/minio/pkg/probe"
)

type credScope struct {
    accessKeyID string
    scope struct {
        date time.Time
        region string
        service string
        request string
    }
}

func parseCredential(credElement string) (credScope, *probe.Error) {
    creds := strings.Split(strings.TrimSpace(credElement), "=")
    if len(creds) != 2 {
        return credScope{}, ErrMissingFields("Credential tag has missing fields.", credElement).Trace(credElement)
    }
    if creds[0] != "Credential" {
        return credScope{}, ErrMissingCredTag("Missing credentials tag.", credElement).Trace(credElement)
    }
    credElements := strings.Split(strings.TrimSpace(creds[1]), "/")
    if len(credElements) != 5 {
        return credScope{}, ErrCredMalformed("Credential values malformed.", credElement).Trace(credElement)
    }
    if !isValidAccessKey.MatchString(credElements[0]) {
        return credScope{}, ErrInvalidAccessKeyID("Invalid access key id.", credElement).Trace(credElement)
    }
    cred := credScope{
        accessKeyID: credElements[0],
    }
    var e error
    cred.scope.date, e = time.Parse(yyyymmdd, credElements[1])
    if e != nil {
        return credScope{}, ErrInvalidDateFormat("Invalid date format.", credElement).Trace(credElement)
    }
    if credElements[2] == "" {
        return credScope{}, ErrRegionISEmpty("Region is empty.", credElement).Trace(credElement)
    }
    cred.scope.region = credElements[2]
    if credElements[3] != "s3" {
        return credScope{}, ErrInvalidService("Invalid service detected.", credElement).Trace(credElement)
    }
    cred.scope.service = credElements[3]
    if credElements[4] != "aws4_request" {
        return credScope{}, ErrInvalidRequestVersion("Invalid request version detected.", credElement).Trace(credElement)
    }
    cred.scope.request = credElements[4]
    return cred, nil
}
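parseCredential expects the full `Credential=...` tag and exactly five '/'-separated scope fields. An illustrative tag and an analogous split (the access key is AWS's documentation placeholder and satisfies the 20-character pattern above):

```go
package main

import (
    "fmt"
    "strings"
)

func main() {
    // The tag handed to parseCredential, taken from the Authorization header or
    // the X-Amz-Credential query parameter (values are illustrative).
    credTag := "Credential=AKIAIOSFODNN7EXAMPLE/20150101/us-east-1/s3/aws4_request"

    // Tag name first, then the five scope fields accessKey/date/region/service/request.
    parts := strings.SplitN(credTag, "=", 2)
    fields := strings.Split(parts[1], "/")
    fmt.Println(parts[0], fields) // Credential [AKIAIOSFODNN7EXAMPLE 20150101 us-east-1 s3 aws4_request]
}
```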
// parse signature.
func parseSignature(signElement string) (string, *probe.Error) {
    signFields := strings.Split(strings.TrimSpace(signElement), "=")
    if len(signFields) != 2 {
        return "", ErrMissingFields("Signature tag has missing fields.", signElement).Trace(signElement)
    }
    if signFields[0] != "Signature" {
        return "", ErrMissingSignTag("Signature tag is missing", signElement).Trace(signElement)
    }
    signature := signFields[1]
    return signature, nil
}

// parse signed headers.
func parseSignedHeaders(signedHdrElement string) ([]string, *probe.Error) {
    signedHdrFields := strings.Split(strings.TrimSpace(signedHdrElement), "=")
    if len(signedHdrFields) != 2 {
        return nil, ErrMissingFields("Signed headers tag has missing fields.", signedHdrElement).Trace(signedHdrElement)
    }
    if signedHdrFields[0] != "SignedHeaders" {
        return nil, ErrMissingSignHeadersTag("Signed headers tag is missing.", signedHdrElement).Trace(signedHdrElement)
    }
    signedHeaders := strings.Split(signedHdrFields[1], ";")
    return signedHeaders, nil
}

// structured version of AWS Signature V4 header.
type signValues struct {
    Creds credScope
    SignedHeaders []string
    Signature string
}

// structued version of AWS Signature V4 query string.
type preSignValues struct {
    signValues
    Date time.Time
    Expires time.Duration
}

// Parses signature version '4' query string of the following form.
//
// querystring = X-Amz-Algorithm=algorithm
// querystring += &X-Amz-Credential= urlencode(access_key_ID + '/' + credential_scope)
// querystring += &X-Amz-Date=date
// querystring += &X-Amz-Expires=timeout interval
// querystring += &X-Amz-SignedHeaders=signed_headers
// querystring += &X-Amz-Signature=signature
//
func parsePreSignV4(query url.Values) (preSignValues, *probe.Error) {
    // Verify if the query algorithm is supported or not.
    if query.Get("X-Amz-Algorithm") != signV4Algorithm {
        return preSignValues{}, ErrUnsuppSignAlgo("Unsupported algorithm in query string.", query.Get("X-Amz-Algorithm"))
    }

    // Initialize signature version '4' structured header.
    preSignV4Values := preSignValues{}

    var err *probe.Error
    // Save credentail values.
    preSignV4Values.Creds, err = parseCredential(query.Get("X-Amz-Credential"))
    if err != nil {
        return preSignValues{}, err.Trace(query.Get("X-Amz-Credential"))
    }

    var e error
    // Save date in native time.Time.
    preSignV4Values.Date, e = time.Parse(iso8601Format, query.Get("X-Amz-Date"))
    if e != nil {
        return preSignValues{}, ErrMalformedDate("Malformed date string.", query.Get("X-Amz-Date")).Trace(query.Get("X-Amz-Date"))
    }

    // Save expires in native time.Duration.
    preSignV4Values.Expires, e = time.ParseDuration(query.Get("X-Amz-Expires") + "s")
    if e != nil {
        return preSignValues{}, ErrMalformedExpires("Malformed expires string.", query.Get("X-Amz-Expires")).Trace(query.Get("X-Amz-Expires"))
    }

    // Save signed headers.
    preSignV4Values.SignedHeaders, err = parseSignedHeaders(query.Get("X-Amz-SignedHeaders"))
    if err != nil {
        return preSignValues{}, err.Trace(query.Get("X-Amz-SignedHeaders"))
    }

    // Save signature.
    preSignV4Values.Signature, err = parseSignature(query.Get("X-Amz-Signature"))
    if err != nil {
        return preSignValues{}, err.Trace(query.Get("X-Amz-Signature"))
    }

    // Return structed form of signature query string.
    return preSignV4Values, nil
}

// Parses signature version '4' header of the following form.
//
// Authorization: algorithm Credential=access key ID/credential scope, \
// SignedHeaders=SignedHeaders, Signature=signature
//
func parseSignV4(v4Auth string) (signValues, *probe.Error) {
    // Replace all spaced strings, some clients can send spaced
    // parameters and some won't. So we pro-actively remove any spaces
    // to make parsing easier.
    v4Auth = strings.Replace(v4Auth, " ", "", -1)
    if v4Auth == "" {
        return signValues{}, ErrAuthHeaderEmpty("Auth header empty.").Trace(v4Auth)
    }

    // Verify if the header algorithm is supported or not.
    if !strings.HasPrefix(v4Auth, signV4Algorithm) {
        return signValues{}, ErrUnsuppSignAlgo("Unsupported algorithm in authorization header.", v4Auth).Trace(v4Auth)
    }

    // Strip off the Algorithm prefix.
    v4Auth = strings.TrimPrefix(v4Auth, signV4Algorithm)
    authFields := strings.Split(strings.TrimSpace(v4Auth), ",")
    if len(authFields) != 3 {
        return signValues{}, ErrMissingFields("Missing fields in authorization header.", v4Auth).Trace(v4Auth)
    }

    // Initialize signature version '4' structured header.
    signV4Values := signValues{}

    var err *probe.Error
    // Save credentail values.
    signV4Values.Creds, err = parseCredential(authFields[0])
    if err != nil {
        return signValues{}, err.Trace(v4Auth)
    }

    // Save signed headers.
    signV4Values.SignedHeaders, err = parseSignedHeaders(authFields[1])
    if err != nil {
        return signValues{}, err.Trace(v4Auth)
    }

    // Save signature.
    signV4Values.Signature, err = parseSignature(authFields[2])
    if err != nil {
        return signValues{}, err.Trace(v4Auth)
    }

    // Return the structure here.
    return signV4Values, nil
}
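For reference, the overall Authorization header shape that parseSignV4 splits into the credential, signed-headers and signature tags handled above; every value below is an illustrative placeholder.

```go
package main

import "fmt"

func main() {
    // Illustrative Authorization header in the form parseSignV4 expects:
    // the algorithm, then Credential=, SignedHeaders= and Signature= tags
    // separated by commas.
    authHeader := "AWS4-HMAC-SHA256 " +
        "Credential=AKIAIOSFODNN7EXAMPLE/20150101/us-east-1/s3/aws4_request, " +
        "SignedHeaders=host;x-amz-content-sha256;x-amz-date, " +
        "Signature=0f1e2d3c4b5a69788796a5b4c3d2e1f00112233445566778899aabbccddeeff0"
    fmt.Println(authHeader)
}
```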
@@ -306,7 +306,7 @@ func (b bucket) WriteObject(objectName string, objectData io.Reader, size int64,
            //
            // Signature mismatch occurred all temp files to be removed and all data purged.
            CleanupWritersOnError(writers)
            return ObjectMetadata{}, probe.NewError(signV4.SigDoesNotMatch{})
            return ObjectMetadata{}, probe.NewError(SignDoesNotMatch{})
        }
    }
    objMetadata.MD5Sum = hex.EncodeToString(dataMD5sum)

@@ -18,6 +18,13 @@ package xl

import "fmt"

// SignDoesNotMatch - signature does not match.
type SignDoesNotMatch struct{}

func (e SignDoesNotMatch) Error() string {
    return "Signature does not match."
}

// InvalidArgument invalid argument
type InvalidArgument struct{}

@@ -226,7 +226,7 @@ func (xl API) createObjectPart(bucket, key, uploadID string, partID int, content
            return "", err.Trace()
        }
        if !ok {
            return "", probe.NewError(signV4.SigDoesNotMatch{})
            return "", probe.NewError(SignDoesNotMatch{})
        }
    }
}
@@ -342,7 +342,7 @@ func (xl API) completeMultipartUploadV2(bucket, key, uploadID string, data io.Re
            return nil, err.Trace()
        }
        if !ok {
            return nil, probe.NewError(signV4.SigDoesNotMatch{})
            return nil, probe.NewError(SignDoesNotMatch{})
        }
    }
    parts := &CompleteMultipartUpload{}

@@ -376,7 +376,7 @@ func (xl API) completeMultipartUpload(bucket, object, uploadID string, data io.R
            return ObjectMetadata{}, err.Trace()
        }
        if !ok {
            return ObjectMetadata{}, probe.NewError(signV4.SigDoesNotMatch{})
            return ObjectMetadata{}, probe.NewError(SignDoesNotMatch{})
        }
    }
    parts := &CompleteMultipartUpload{}

@@ -392,7 +392,7 @@ func (xl API) createObject(bucket, key, contentType, expectedMD5Sum string, size
        if !ok {
            // Delete perhaps the object is already saved, due to the nature of append()
            xl.objects.Delete(objectKey)
            return ObjectMetadata{}, probe.NewError(signV4.SigDoesNotMatch{})
            return ObjectMetadata{}, probe.NewError(SignDoesNotMatch{})
        }
    }

@@ -435,7 +435,7 @@ func (xl API) MakeBucket(bucketName, acl string, location io.Reader, signature *
            return err.Trace()
        }
        if !ok {
            return probe.NewError(signV4.SigDoesNotMatch{})
            return probe.NewError(SignDoesNotMatch{})
        }
    }