Add more unit tests for azure/gcs/b2 gateway (#5236)

Also adds a blazer SDK update exposing
error response headers.
Harshavardhana
2017-11-27 18:29:22 -08:00
committed by GitHub
parent 6a2d7ae808
commit 05b395e81d
5 changed files with 375 additions and 59 deletions
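The new tests added here all share one table-driven pattern: build a slice of cases pairing a backend SDK error with the gateway error it should be translated into, then loop over the cases and compare Error() strings. The sketch below shows that pattern in isolation; serviceError, bucketNotFound, and toObjectError are illustrative stand-ins only, not the gateway's real types or the actual azureToObjectError/b2ToObjectError/gcsToObjectError implementations.

package gateway

import (
	"fmt"
	"net/http"
	"testing"
)

// serviceError stands in for a backend SDK error such as Azure's
// storage.AzureStorageServiceError or blazer's b2.Error; the fields
// are illustrative, not the real SDK types.
type serviceError struct {
	StatusCode int
	Code       string
}

func (e serviceError) Error() string { return fmt.Sprintf("%d: %s", e.StatusCode, e.Code) }

// bucketNotFound mimics a gateway-layer error such as BucketNotFound.
type bucketNotFound struct{ Bucket string }

func (e bucketNotFound) Error() string { return "bucket not found: " + e.Bucket }

// toObjectError is a simplified analogue of the gateway mapping
// functions: translate a backend error into the gateway's own error
// type and pass anything unrecognized through unchanged.
func toObjectError(err error, params ...string) error {
	if err == nil {
		return nil
	}
	se, ok := err.(serviceError)
	if !ok {
		return err // non-backend errors are returned unchanged
	}
	bucket := ""
	if len(params) > 0 {
		bucket = params[0]
	}
	if se.StatusCode == http.StatusNotFound {
		return bucketNotFound{Bucket: bucket}
	}
	return err
}

// TestToObjectError uses the same table-driven layout as the tests in
// this commit: each case pairs a backend error with the gateway error
// it should map to.
func TestToObjectError(t *testing.T) {
	testCases := []struct {
		params      []string
		backendErr  error
		expectedErr error
	}{
		{nil, nil, nil},
		{nil, fmt.Errorf("Not a backend error"), fmt.Errorf("Not a backend error")},
		{[]string{"bucket"}, serviceError{StatusCode: http.StatusNotFound, Code: "NotFound"}, bucketNotFound{Bucket: "bucket"}},
	}
	for i, testCase := range testCases {
		err := toObjectError(testCase.backendErr, testCase.params...)
		if err == nil && testCase.expectedErr == nil {
			continue
		}
		if err == nil || testCase.expectedErr == nil || err.Error() != testCase.expectedErr.Error() {
			t.Errorf("Test %d: expected %v, got %v", i+1, testCase.expectedErr, err)
		}
	}
}

Comparing Error() strings rather than error values keeps such tests independent of how the mapping functions wrap errors (for example via errors.Trace), which is the same comparison strategy the tests below use.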


@@ -17,6 +17,7 @@
package cmd
import (
"fmt"
"net/http"
"net/url"
"reflect"
@@ -31,6 +32,10 @@ func TestS3MetaToAzureProperties(t *testing.T) {
headers := map[string]string{
"accept-encoding": "gzip",
"content-encoding": "gzip",
"cache-control": "age: 3600",
"content-disposition": "dummy",
"content-length": "10",
"content-type": "application/javascript",
"X-Amz-Meta-Hdr": "value",
"X-Amz-Meta-X_test_key": "value",
"X-Amz-Meta-X__test__key": "value",
@@ -101,8 +106,21 @@ func TestAzurePropertiesToS3Meta(t *testing.T) {
"X-Amz-Meta-X-Amz-Key": "hu3ZSqtqwn+aL4V2VhAeov4i+bG3KyCtRMSXQFRHXOk=",
"X-Amz-Meta-X-Amz-Matdesc": "{}",
"X-Amz-Meta-X-Amz-Iv": "eWmyryl8kq+EVnnsE7jpOg==",
"Cache-Control": "max-age: 3600",
"Content-Disposition": "dummy",
"Content-Encoding": "gzip",
"Content-Length": "10",
"Content-MD5": "base64-md5",
"Content-Type": "application/javascript",
}
actualMeta := azurePropertiesToS3Meta(metadata, storage.BlobProperties{})
actualMeta := azurePropertiesToS3Meta(metadata, storage.BlobProperties{
CacheControl: "max-age: 3600",
ContentDisposition: "dummy",
ContentEncoding: "gzip",
ContentLength: 10,
ContentMD5: "base64-md5",
ContentType: "application/javascript",
})
if !reflect.DeepEqual(actualMeta, expectedMeta) {
t.Fatalf("Test failed, expected %#v, got %#v", expectedMeta, actualMeta)
}
@@ -119,10 +137,15 @@ func TestAzureToObjectError(t *testing.T) {
nil, nil, "", "",
},
{
errors.Trace(errUnexpected), errUnexpected, "", "",
errors.Trace(fmt.Errorf("Non azure error")),
fmt.Errorf("Non azure error"), "", "",
},
{
errors.Trace(errUnexpected), errors.Trace(errUnexpected), "", "",
storage.AzureStorageServiceError{
Code: "ContainerAlreadyExists",
}, storage.AzureStorageServiceError{
Code: "ContainerAlreadyExists",
}, "bucket", "",
},
{
errors.Trace(storage.AzureStorageServiceError{
@@ -134,6 +157,16 @@ func TestAzureToObjectError(t *testing.T) {
Code: "InvalidResourceName",
}), BucketNameInvalid{Bucket: "bucket."}, "bucket.", "",
},
{
errors.Trace(storage.AzureStorageServiceError{
Code: "RequestBodyTooLarge",
}), PartTooBig{}, "", "",
},
{
errors.Trace(storage.AzureStorageServiceError{
Code: "InvalidMetadata",
}), UnsupportedMetadata{}, "", "",
},
{
errors.Trace(storage.AzureStorageServiceError{
StatusCode: http.StatusNotFound,
@@ -154,8 +187,7 @@ func TestAzureToObjectError(t *testing.T) {
},
}
for i, testCase := range testCases {
err := azureToObjectError(testCase.actualErr, testCase.bucket, testCase.object)
if err != nil {
if err := azureToObjectError(testCase.actualErr, testCase.bucket, testCase.object); err != nil {
if err.Error() != testCase.expectedErr.Error() {
t.Errorf("Test %d: Expected error %s, got %s", i+1, testCase.expectedErr, err)
}
@@ -191,32 +223,41 @@ func TestAzureParseBlockID(t *testing.T) {
subPartNumber int
uploadID string
md5 string
success bool
}{
{"MDAwMDEuMDcuZjMyOGMzNWNhZDkzODEzNy5kNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZQ==", 1, 7, "f328c35cad938137", "d41d8cd98f00b204e9800998ecf8427e"},
{"MDAwMDIuMTkuYWJjZGMzNWNhZDkzODEzNy5hN2ZiNmI3YjM2ZWU0ZWQ2NmI1NTQ2ZmFjNDY5MDI3Mw==", 2, 19, "abcdc35cad938137", "a7fb6b7b36ee4ed66b5546fac4690273"},
// Invalid base64.
{"MDAwMDEuMDcuZjMyOGMzNWNhZDkzODEzNy5kNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZQ=", 0, 0, "", "", false},
// Invalid number of tokens.
{"MDAwMDEuQUEuZjMyOGMzNWNhZDkzODEzNwo=", 0, 0, "", "", false},
// Invalid encoded part ID.
{"MDAwMGEuMDcuZjMyOGMzNWNhZDkzODEzNy5kNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZQo=", 0, 0, "", "", false},
// Invalid sub part ID.
{"MDAwMDEuQUEuZjMyOGMzNWNhZDkzODEzNy5kNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZQo=", 0, 0, "", "", false},
{"MDAwMDEuMDcuZjMyOGMzNWNhZDkzODEzNy5kNDFkOGNkOThmMDBiMjA0ZTk4MDA5OThlY2Y4NDI3ZQ==", 1, 7, "f328c35cad938137", "d41d8cd98f00b204e9800998ecf8427e", true},
{"MDAwMDIuMTkuYWJjZGMzNWNhZDkzODEzNy5hN2ZiNmI3YjM2ZWU0ZWQ2NmI1NTQ2ZmFjNDY5MDI3Mw==", 2, 19, "abcdc35cad938137", "a7fb6b7b36ee4ed66b5546fac4690273", true},
}
for _, test := range testCases {
for i, test := range testCases {
partID, subPartNumber, uploadID, md5, err := azureParseBlockID(test.blockID)
if err != nil {
t.Fatal(err)
if err != nil && test.success {
t.Errorf("Test %d: Expected success but failed %s", i+1, err)
}
if partID != test.partID {
t.Fatalf("%d not equal to %d", partID, test.partID)
if err == nil && !test.success {
t.Errorf("Test %d: Expected to fail but succeeeded insteadl", i+1)
}
if subPartNumber != test.subPartNumber {
t.Fatalf("%d not equal to %d", subPartNumber, test.subPartNumber)
if err == nil {
if partID != test.partID {
t.Errorf("Test %d: %d not equal to %d", i+1, partID, test.partID)
}
if subPartNumber != test.subPartNumber {
t.Errorf("Test %d: %d not equal to %d", i+1, subPartNumber, test.subPartNumber)
}
if uploadID != test.uploadID {
t.Errorf("Test %d: %s not equal to %s", i+1, uploadID, test.uploadID)
}
if md5 != test.md5 {
t.Errorf("Test %d: %s not equal to %s", i+1, md5, test.md5)
}
}
if uploadID != test.uploadID {
t.Fatalf("%s not equal to %s", uploadID, test.uploadID)
}
if md5 != test.md5 {
t.Fatalf("%s not equal to %s", md5, test.md5)
}
}
_, _, _, _, err := azureParseBlockID("junk")
if err == nil {
t.Fatal("Expected azureParseBlockID() to return error")
}
}


@@ -17,8 +17,12 @@
package cmd
import (
"fmt"
"net/http"
"testing"
b2 "github.com/minio/blazer/base"
"github.com/minio/minio/pkg/errors"
)
// Tests headerToObjectInfo
@@ -102,3 +106,90 @@ func TestMkRange(t *testing.T) {
}
}
}
// Test b2 object error.
func TestB2ObjectError(t *testing.T) {
testCases := []struct {
params []string
b2Err error
expectedErr error
}{
{
[]string{}, nil, nil,
},
{
[]string{}, fmt.Errorf("Not *Error"), fmt.Errorf("Not *Error"),
},
{
[]string{}, errors.Trace(fmt.Errorf("Non B2 Error")), fmt.Errorf("Non B2 Error"),
},
{
[]string{"bucket"}, errors.Trace(b2.Error{
StatusCode: 1,
Code: "duplicate_bucket_name",
}), BucketAlreadyOwnedByYou{Bucket: "bucket"},
},
{
[]string{"bucket"}, errors.Trace(b2.Error{
StatusCode: 1,
Code: "bad_request",
}), BucketNotFound{Bucket: "bucket"},
},
{
[]string{"bucket", "object"}, errors.Trace(b2.Error{
StatusCode: 1,
Code: "bad_request",
}), ObjectNameInvalid{
Bucket: "bucket",
Object: "object",
},
},
{
[]string{"bucket"}, errors.Trace(b2.Error{
StatusCode: 1,
Code: "bad_bucket_id",
}), BucketNotFound{Bucket: "bucket"},
},
{
[]string{"bucket", "object"}, errors.Trace(b2.Error{
StatusCode: 1,
Code: "file_not_present",
}), ObjectNotFound{
Bucket: "bucket",
Object: "object",
},
},
{
[]string{"bucket", "object"}, errors.Trace(b2.Error{
StatusCode: 1,
Code: "not_found",
}), ObjectNotFound{
Bucket: "bucket",
Object: "object",
},
},
{
[]string{"bucket"}, errors.Trace(b2.Error{
StatusCode: 1,
Code: "cannot_delete_non_empty_bucket",
}), BucketNotEmpty{Bucket: "bucket"},
},
{
[]string{"bucket", "object", "uploadID"}, errors.Trace(b2.Error{
StatusCode: 1,
Message: "No active upload for",
}), InvalidUploadID{
UploadID: "uploadID",
},
},
}
for i, testCase := range testCases {
actualErr := b2ToObjectError(testCase.b2Err, testCase.params...)
if actualErr != nil {
if actualErr.Error() != testCase.expectedErr.Error() {
t.Errorf("Test %d: Expected %s, got %s", i+1, testCase.expectedErr, actualErr)
}
}
}
}


@@ -1,5 +1,5 @@
/*
* Minio Cloud Storage, (C) 2016 Minio, Inc.
* Minio Cloud Storage, (C) 2017 Minio, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -23,7 +23,9 @@ import (
"reflect"
"testing"
minio "github.com/minio/minio-go"
"github.com/minio/minio-go"
"github.com/minio/minio/pkg/errors"
"google.golang.org/api/googleapi"
)
func TestToGCSPageToken(t *testing.T) {
@@ -201,10 +203,192 @@ func TestGCSParseProjectID(t *testing.T) {
f.WriteString(contents)
projectID, err := gcsParseProjectID(f.Name())
if err != nil {
t.Error(err)
return
t.Fatal(err)
}
if projectID != "miniotesting" {
t.Errorf(`Expected projectID value to be "miniotesting"`)
}
if _, err = gcsParseProjectID("non-existent"); err == nil {
t.Errorf(`Expected to fail but succeeded reading "non-existent"`)
}
f.WriteString(`,}`)
if _, err := gcsParseProjectID(f.Name()); err == nil {
t.Errorf(`Expected to fail reading corrupted credentials file`)
}
}
func TestGCSPublicURL(t *testing.T) {
gcsURL := toGCSPublicURL("bucket", "testing")
if gcsURL != "https://storage.googleapis.com/bucket/testing" {
t.Errorf(`Expected "https://storage.googleapis.com/bucket/testing", got %s"`, gcsURL)
}
}
func TestGCSToObjectError(t *testing.T) {
testCases := []struct {
params []string
gcsErr error
expectedErr error
}{
{
[]string{}, nil, nil,
},
{
[]string{}, fmt.Errorf("Not *Error"), fmt.Errorf("Not *Error"),
},
{
[]string{"bucket"},
errors.Trace(fmt.Errorf("storage: bucket doesn't exist")),
BucketNotFound{
Bucket: "bucket",
},
},
{
[]string{"bucket", "object"},
errors.Trace(fmt.Errorf("storage: object doesn't exist")),
ObjectNotFound{
Bucket: "bucket",
Object: "object",
},
},
{
[]string{"bucket", "object", "uploadID"},
errors.Trace(fmt.Errorf("storage: object doesn't exist")),
InvalidUploadID{
UploadID: "uploadID",
},
},
{
[]string{},
errors.Trace(fmt.Errorf("Unknown error")),
fmt.Errorf("Unknown error"),
},
{
[]string{"bucket", "object"},
errors.Trace(&googleapi.Error{
Message: "No list of errors",
}),
&googleapi.Error{
Message: "No list of errors",
},
},
{
[]string{"bucket", "object"},
errors.Trace(&googleapi.Error{
Errors: []googleapi.ErrorItem{{
Reason: "conflict",
Message: "You already own this bucket. Please select another name.",
}},
}),
BucketAlreadyOwnedByYou{Bucket: "bucket"},
},
{
[]string{"bucket", "object"},
errors.Trace(&googleapi.Error{
Errors: []googleapi.ErrorItem{{
Reason: "conflict",
Message: "Sorry, that name is not available. Please try a different one.",
}},
}),
BucketAlreadyExists{Bucket: "bucket"},
},
{
[]string{"bucket", "object"},
errors.Trace(&googleapi.Error{
Errors: []googleapi.ErrorItem{{
Reason: "conflict",
}},
}),
BucketNotEmpty{Bucket: "bucket"},
},
{
[]string{"bucket"},
errors.Trace(&googleapi.Error{
Errors: []googleapi.ErrorItem{{
Reason: "notFound",
}},
}),
BucketNotFound{Bucket: "bucket"},
},
{
[]string{"bucket", "object"},
errors.Trace(&googleapi.Error{
Errors: []googleapi.ErrorItem{{
Reason: "notFound",
}},
}),
ObjectNotFound{
Bucket: "bucket",
Object: "object",
},
},
{
[]string{"bucket"},
errors.Trace(&googleapi.Error{
Errors: []googleapi.ErrorItem{{
Reason: "invalid",
}},
}),
BucketNameInvalid{
Bucket: "bucket",
},
},
{
[]string{"bucket", "object"},
errors.Trace(&googleapi.Error{
Errors: []googleapi.ErrorItem{{
Reason: "forbidden",
}},
}),
PrefixAccessDenied{
Bucket: "bucket",
Object: "object",
},
},
{
[]string{"bucket", "object"},
errors.Trace(&googleapi.Error{
Errors: []googleapi.ErrorItem{{
Reason: "keyInvalid",
}},
}),
PrefixAccessDenied{
Bucket: "bucket",
Object: "object",
},
},
{
[]string{"bucket", "object"},
errors.Trace(&googleapi.Error{
Errors: []googleapi.ErrorItem{{
Reason: "required",
}},
}),
PrefixAccessDenied{
Bucket: "bucket",
Object: "object",
},
},
{
[]string{"bucket", "object"},
errors.Trace(&googleapi.Error{
Errors: []googleapi.ErrorItem{{
Reason: "unknown",
}},
}),
fmt.Errorf("Unsupported error reason: unknown"),
},
}
for i, testCase := range testCases {
actualErr := gcsToObjectError(testCase.gcsErr, testCase.params...)
if actualErr != nil {
if actualErr.Error() != testCase.expectedErr.Error() {
t.Errorf("Test %d: Expected %s, got %s", i+1, testCase.expectedErr, actualErr)
}
}
}
}