mirror of https://github.com/minio/minio.git
Add gocyclo into source build, choosing cyclomatic complexity tolerance of 15 and below
parent c366fdd2d8
commit 58082cd8dc

Makefile: 7 changes
@@ -14,8 +14,9 @@ getdeps: checkdeps checkgopath
 	@go get github.com/tools/godep && echo "Installed godep"
 	@go get github.com/golang/lint/golint && echo "Installed golint"
 	@go get golang.org/x/tools/cmd/vet && echo "Installed vet"
+	@go get github.com/fzipp/gocyclo && echo "Installed gocyclo"
 
-verifiers: getdeps vet fmt lint
+verifiers: getdeps vet fmt lint cyclo
 
 vet:
 	@echo "Running $@"
@@ -28,6 +29,10 @@ lint:
 	@echo "Running $@"
 	@test -z "$$(golint ./... | grep -v Godeps/_workspace/src/ | tee /dev/stderr)"
 
+cyclo:
+	@echo "Running $@"
+	@test -z "$$(gocyclo -over 15 . | grep -v Godeps/_workspace/src/ | tee /dev/stderr)"
+
 build-all: verifiers
 	@echo "Building Libraries"
 	@godep go generate ./...

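The new cyclo target mirrors the existing lint target: gocyclo -over 15 . prints every function whose cyclomatic complexity exceeds 15, and wrapping that in test -z "$$(...)" makes the target, and therefore verifiers and build-all, fail as soon as any such function is reported, while tee /dev/stderr still shows the offending functions in the build log.
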
@@ -128,6 +128,50 @@ func doDir(name string) {
 	}
 }
+
+func doDecl(p *goPackage, decl interface{}, cmap ast.CommentMap) bool {
+	switch n := decl.(type) {
+	case *ast.GenDecl:
+		// var, const, types
+		for _, spec := range n.Specs {
+			switch s := spec.(type) {
+			case *ast.ValueSpec:
+				// constants and variables.
+				for _, name := range s.Names {
+					p.decl[name.Name] = n
+				}
+			case *ast.TypeSpec:
+				// type definitions.
+				p.decl[s.Name.Name] = n
+			}
+		}
+	case *ast.FuncDecl:
+		// if function is 'main', never check
+		if n.Name.Name == "main" {
+			return true
+		}
+		// Do not be strict on non-exported functions
+		if !ast.IsExported(n.Name.Name) {
+			return true
+		}
+		// Do not be strict for field list functions
+		// if n.Recv != nil {
+		//	continue
+		//}
+		// Be strict for global functions
+		_, ok := cmap[n]
+		if ok == false {
+			p.missingcomments[n.Name.Name] = n
+		}
+
+		// function declarations
+		// TODO(remy): do methods
+		if n.Recv == nil {
+			p.decl[n.Name.Name] = n
+		}
+	}
+	return false
+}
 
 func doPackage(fs *token.FileSet, pkg *ast.Package) {
 	p := &goPackage{
 		p: pkg,
@@ -139,45 +183,8 @@ func doPackage(fs *token.FileSet, pkg *ast.Package) {
 	for _, file := range pkg.Files {
 		cmap := ast.NewCommentMap(fs, file, file.Comments)
 		for _, decl := range file.Decls {
-			switch n := decl.(type) {
-			case *ast.GenDecl:
-				// var, const, types
-				for _, spec := range n.Specs {
-					switch s := spec.(type) {
-					case *ast.ValueSpec:
-						// constants and variables.
-						for _, name := range s.Names {
-							p.decl[name.Name] = n
-						}
-					case *ast.TypeSpec:
-						// type definitions.
-						p.decl[s.Name.Name] = n
-					}
-				}
-			case *ast.FuncDecl:
-				// if function is 'main', never check
-				if n.Name.Name == "main" {
-					continue
-				}
-				// Do not be strict on non-exported functions
-				if !ast.IsExported(n.Name.Name) {
-					continue
-				}
-				// Do not be strict for field list functions
-				// if n.Recv != nil {
-				//	continue
-				//}
-				// Be strict for global functions
-				_, ok := cmap[n]
-				if ok == false {
-					p.missingcomments[n.Name.Name] = n
-				}
-
-				// function declarations
-				// TODO(remy): do methods
-				if n.Recv == nil {
-					p.decl[n.Name.Name] = n
-				}
-			}
+			if doDecl(p, decl, cmap) {
+				continue
+			}
 		}
 	}

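The remaining hunks all reduce complexity the same way: a long inlined switch or run of checks is hoisted into a helper, and the original function keeps only the call. Here the declaration-handling switch moves from doPackage into doDecl, with the old continue statements becoming return true so the caller decides whether to skip. A minimal self-contained sketch of that pattern (illustrative only, not minio code; all names are made up):

	package main

	import "fmt"

	// handleItem concentrates the branching that used to live inline in process,
	// returning true when the caller should skip further work on the item.
	func handleItem(item int, seen map[int]bool) bool {
		switch {
		case item < 0:
			return true // skip negatives entirely
		case seen[item]:
			return true // skip duplicates
		default:
			seen[item] = true
			return false
		}
	}

	func process(items []int) []int {
		seen := make(map[int]bool)
		var kept []int
		for _, item := range items {
			// One call replaces the inlined switch, keeping process simple
			// and its cyclomatic complexity low.
			if handleItem(item, seen) {
				continue
			}
			kept = append(kept, item)
		}
		return kept
	}

	func main() {
		fmt.Println(process([]int{3, -1, 3, 7})) // prints [3 7]
	}
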
@@ -56,28 +56,7 @@ func newRange(req *http.Request, size int64) (*httpRange, error) {
 	return r, nil
 }
 
-// parseRange parses a Range header string as per RFC 2616.
-func (r *httpRange) parseRange(s string) error {
-	if s == "" {
-		return errors.New("header not present")
-	}
-	if !strings.HasPrefix(s, b) {
-		return errors.New("invalid range")
-	}
-
-	ras := strings.Split(s[len(b):], ",")
-	if len(ras) == 0 {
-		return errors.New("invalid request")
-	}
-	// Just pick the first one and ignore the rest, we only support one range per object
-	if len(ras) > 1 {
-		return errors.New("multiple ranges specified")
-	}
-
-	ra := strings.TrimSpace(ras[0])
-	if ra == "" {
-		return errors.New("invalid range")
-	}
+func (r *httpRange) parse(ra string) error {
 	i := strings.Index(ra, "-")
 	if i < 0 {
 		return errors.New("invalid range")
@@ -117,3 +96,28 @@ func (r *httpRange) parseRange(s string) error {
 	}
 	return nil
 }
+
+// parseRange parses a Range header string as per RFC 2616.
+func (r *httpRange) parseRange(s string) error {
+	if s == "" {
+		return errors.New("header not present")
+	}
+	if !strings.HasPrefix(s, b) {
+		return errors.New("invalid range")
+	}
+
+	ras := strings.Split(s[len(b):], ",")
+	if len(ras) == 0 {
+		return errors.New("invalid request")
+	}
+	// Just pick the first one and ignore the rest, we only support one range per object
+	if len(ras) > 1 {
+		return errors.New("multiple ranges specified")
+	}
+
+	ra := strings.TrimSpace(ras[0])
+	if ra == "" {
+		return errors.New("invalid range")
+	}
+	return r.parse(ra)
+}

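Here parseRange keeps only the header-level work: it strips the range-unit prefix (b is presumably the "bytes=" prefix constant declared elsewhere in this file), rejects empty and multi-range requests, and hands the single byte-range spec to the new parse method via return r.parse(ra), so each function stays well under the complexity limit.
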
@@ -137,6 +137,37 @@ func isValidPrincipal(principal string) bool {
 	return ok
 }
 
+func parseStatement(statement Statement) bool {
+	if len(statement.Sid) == 0 {
+		return false
+	}
+	if len(statement.Effect) == 0 {
+		return false
+	}
+	if !isValidEffect(statement.Effect) {
+		return false
+	}
+	if len(statement.Principal.AWS) == 0 {
+		return false
+	}
+	if !isValidPrincipal(statement.Principal.AWS) {
+		return false
+	}
+	if len(statement.Action) == 0 {
+		return false
+	}
+	if !isValidAction(statement.Action) && !isValidActionS3(statement.Action) {
+		return false
+	}
+	if len(statement.Resource) == 0 {
+		return false
+	}
+	if !isValidResource(statement.Resource) {
+		return false
+	}
+	return true
+}
+
 // Parsepolicy - validate request body is proper JSON and in accordance with policy standards
 func Parsepolicy(data io.Reader) (BucketPolicy, bool) {
 	var policy BucketPolicy
@@ -157,33 +188,7 @@ func Parsepolicy(data io.Reader) (BucketPolicy, bool) {
 	}
 
 	for _, statement := range policy.Statement {
-		if len(statement.Sid) == 0 {
-			goto error
-		}
-		if len(statement.Effect) == 0 {
-			goto error
-		}
-		if !isValidEffect(statement.Effect) {
-			goto error
-		}
-
-		if len(statement.Principal.AWS) == 0 {
-			goto error
-		}
-		if !isValidPrincipal(statement.Principal.AWS) {
-			goto error
-		}
-		if len(statement.Action) == 0 {
-			goto error
-		}
-		if !isValidAction(statement.Action) && !isValidActionS3(statement.Action) {
-			goto error
-		}
-		if len(statement.Resource) == 0 {
-			goto error
-		}
-
-		if !isValidResource(statement.Resource) {
+		if !parseStatement(statement) {
 			goto error
 		}
 	}

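The per-statement checks move essentially verbatim out of the Parsepolicy loop into parseStatement, swapping goto error for return false; the loop body then collapses to a single if !parseStatement(statement) { goto error }, which is what brings Parsepolicy itself below the gocyclo threshold.
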
@@ -23,6 +23,32 @@ import (
 	"github.com/minio-io/minio/pkg/drivers"
 )
 
+func (file *fileDriver) filterDelimiterPrefix(bucket, name, fname, delimitedName string, resources drivers.BucketResourcesMetadata) (drivers.ObjectMetadata, drivers.BucketResourcesMetadata, error) {
+	var err error
+	var metadata drivers.ObjectMetadata
+	switch true {
+	case name == resources.Prefix:
+		// Use resources.Prefix to filter out delimited files
+		metadata, err = file.GetObjectMetadata(bucket, name, resources.Prefix)
+		if err != nil {
+			return drivers.ObjectMetadata{}, resources, drivers.EmbedError(bucket, "", err)
+		}
+	case delimitedName == fname:
+		// Use resources.Prefix to filter out delimited files
+		metadata, err = file.GetObjectMetadata(bucket, name, resources.Prefix)
+		if err != nil {
+			return drivers.ObjectMetadata{}, resources, drivers.EmbedError(bucket, "", err)
+		}
+	case delimitedName != "":
+		if delimitedName == resources.Delimiter {
+			resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, resources.Prefix+delimitedName)
+		} else {
+			resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, delimitedName)
+		}
+	}
+	return metadata, resources, nil
+}
+
 // TODO handle resources.Marker
 func (file *fileDriver) filter(bucket, name string, f os.FileInfo, resources drivers.BucketResourcesMetadata) (drivers.ObjectMetadata, drivers.BucketResourcesMetadata, error) {
 	var err error
@@ -34,25 +60,9 @@ func (file *fileDriver) filter(bucket, name string, f os.FileInfo, resources drivers.BucketResourcesMetadata) (drivers.ObjectMetadata, drivers.BucketResourcesMetadata, error) {
 	if strings.HasPrefix(name, resources.Prefix) {
 		trimmedName := strings.TrimPrefix(name, resources.Prefix)
 		delimitedName := delimiter(trimmedName, resources.Delimiter)
-		switch true {
-		case name == resources.Prefix:
-			// Use resources.Prefix to filter out delimited files
-			metadata, err = file.GetObjectMetadata(bucket, name, resources.Prefix)
-			if err != nil {
-				return drivers.ObjectMetadata{}, resources, drivers.EmbedError(bucket, "", err)
-			}
-		case delimitedName == f.Name():
-			// Use resources.Prefix to filter out delimited files
-			metadata, err = file.GetObjectMetadata(bucket, name, resources.Prefix)
-			if err != nil {
-				return drivers.ObjectMetadata{}, resources, drivers.EmbedError(bucket, "", err)
-			}
-		case delimitedName != "":
-			if delimitedName == resources.Delimiter {
-				resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, resources.Prefix+delimitedName)
-			} else {
-				resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, delimitedName)
-			}
-		}
+		metadata, resources, err = file.filterDelimiterPrefix(bucket, name, f.Name(), delimitedName, resources)
+		if err != nil {
+			return drivers.ObjectMetadata{}, resources, err
+		}
 	}
 	// Delimiter present and Prefix is absent

@@ -173,6 +173,23 @@ func appendUniq(slice []string, i string) []string {
 	return append(slice, i)
 }
 
+func (memory memoryDriver) filterDelimiterPrefix(keys []string, key, delimitedName string, resources drivers.BucketResourcesMetadata) (drivers.BucketResourcesMetadata, []string) {
+	switch true {
+	case key == resources.Prefix:
+		keys = appendUniq(keys, key)
+	// DelimitedName - requires resources.Prefix as it was trimmed off earlier in the flow
+	case key == resources.Prefix+delimitedName:
+		keys = appendUniq(keys, key)
+	case delimitedName != "":
+		if delimitedName == resources.Delimiter {
+			resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, resources.Prefix+delimitedName)
+		} else {
+			resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, delimitedName)
+		}
+	}
+	return resources, keys
+}
+
 // ListObjects - list objects from memory
 func (memory memoryDriver) ListObjects(bucket string, resources drivers.BucketResourcesMetadata) ([]drivers.ObjectMetadata, drivers.BucketResourcesMetadata, error) {
 	if _, ok := memory.bucketdata[bucket]; ok == false {
@@ -183,7 +200,7 @@ func (memory memoryDriver) ListObjects(bucket string, resources drivers.BucketResourcesMetadata) ([]drivers.ObjectMetadata, drivers.BucketResourcesMetadata, error) {
 	for key := range memory.objectdata {
 		switch true {
 		// Prefix absent, delimit object key based on delimiter
-		case resources.Delimiter != "" && resources.Prefix == "":
+		case resources.IsDelimiterSet():
 			delimitedName := delimiter(key, resources.Delimiter)
 			switch true {
 			case delimitedName == "" || delimitedName == key:
@@ -192,31 +209,20 @@ func (memory memoryDriver) ListObjects(bucket string, resources drivers.BucketResourcesMetadata) ([]drivers.ObjectMetadata, drivers.BucketResourcesMetadata, error) {
 				resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, delimitedName)
 			}
 		// Prefix present, delimit object key with prefix key based on delimiter
-		case resources.Delimiter != "" && resources.Prefix != "" && strings.HasPrefix(key, resources.Prefix):
-			trimmedName := strings.TrimPrefix(key, resources.Prefix)
-			delimitedName := delimiter(trimmedName, resources.Delimiter)
-			switch true {
-			case key == resources.Prefix:
-				keys = appendUniq(keys, key)
-			// DelimitedName - requires resources.Prefix as it was trimmed off earlier in the flow
-			case key == resources.Prefix+delimitedName:
-				keys = appendUniq(keys, key)
-			case delimitedName != "":
-				if delimitedName == resources.Delimiter {
-					resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, resources.Prefix+delimitedName)
-				} else {
-					resources.CommonPrefixes = appendUniq(resources.CommonPrefixes, delimitedName)
-				}
-			}
+		case resources.IsDelimiterPrefixSet():
+			if strings.HasPrefix(key, resources.Prefix) {
+				trimmedName := strings.TrimPrefix(key, resources.Prefix)
+				delimitedName := delimiter(trimmedName, resources.Delimiter)
+				resources, keys = memory.filterDelimiterPrefix(keys, key, delimitedName, resources)
+			}
 		// Prefix present, nothing to delimit
-		case resources.Delimiter == "" && resources.Prefix != "" && strings.HasPrefix(key, resources.Prefix):
+		case resources.IsPrefixSet():
 			keys = appendUniq(keys, key)
 		// Prefix and delimiter absent
-		case resources.Prefix == "" && resources.Delimiter == "":
+		case resources.IsDefault():
 			keys = appendUniq(keys, key)
 		}
 	}
 
 	sort.Strings(keys)
 	for _, key := range keys {
 		if len(results) == resources.Maxkeys {

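Besides extracting filterDelimiterPrefix, this hunk replaces the compound boolean case conditions with named predicates on BucketResourcesMetadata. Their definitions are not part of the hunks captured here; a hedged sketch of what they presumably look like, inferred only from the conditions they replace (struct simplified, and the per-key strings.HasPrefix check appears to stay at the call site, as the IsDelimiterPrefixSet case shows):

	package drivers

	// BucketResourcesMetadata - simplified stand-in; the real struct has more fields.
	type BucketResourcesMetadata struct {
		Prefix    string
		Delimiter string
	}

	// IsDelimiterSet - assumed: delimiter given, no prefix
	// (replaces resources.Delimiter != "" && resources.Prefix == "").
	func (b BucketResourcesMetadata) IsDelimiterSet() bool {
		return b.Delimiter != "" && b.Prefix == ""
	}

	// IsDelimiterPrefixSet - assumed: both delimiter and prefix given.
	func (b BucketResourcesMetadata) IsDelimiterPrefixSet() bool {
		return b.Delimiter != "" && b.Prefix != ""
	}

	// IsPrefixSet - assumed: prefix given, no delimiter.
	func (b BucketResourcesMetadata) IsPrefixSet() bool {
		return b.Delimiter == "" && b.Prefix != ""
	}

	// IsDefault - assumed: neither prefix nor delimiter given.
	func (b BucketResourcesMetadata) IsDefault() bool {
		return b.Prefix == "" && b.Delimiter == ""
	}
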
@@ -74,7 +74,6 @@ func erasureReader(readers []io.ReadCloser, donutMetadata map[string]string, wri
 	}
 
 	curChunkSize := erasure.GetEncodedBlockLen(curBlockSize, uint8(k))
-
 	encodedBytes := make([][]byte, 16)
 	for i, reader := range readers {
 		defer reader.Close()