diff --git a/cmd/api-datatypes.go b/cmd/api-datatypes.go
index 0f998b6e0..f74c64440 100644
--- a/cmd/api-datatypes.go
+++ b/cmd/api-datatypes.go
@@ -25,90 +25,6 @@ const (
responseRequestIDKey = "x-amz-request-id"
)
-// CSVFileHeaderInfo -Can be either USE IGNORE OR NONE, defines what to do with
-// the first row
-type CSVFileHeaderInfo string
-
-// Constants for file header info.
-const (
- CSVFileHeaderInfoNone CSVFileHeaderInfo = "NONE"
- CSVFileHeaderInfoIgnore = "IGNORE"
- CSVFileHeaderInfoUse = "USE"
-)
-
-// SelectCompressionType - ONLY GZIP is supported
-type SelectCompressionType string
-
-// Constants for compression types under select API.
-const (
- SelectCompressionNONE SelectCompressionType = "NONE"
- SelectCompressionGZIP = "GZIP"
- SelectCompressionBZIP = "BZIP2"
-)
-
-// CSVQuoteFields - Can be either Always or AsNeeded
-type CSVQuoteFields string
-
-// Constants for csv quote styles.
-const (
- CSVQuoteFieldsAlways CSVQuoteFields = "Always"
- CSVQuoteFieldsAsNeeded = "AsNeeded"
-)
-
-// QueryExpressionType - Currently can only be SQL
-type QueryExpressionType string
-
-// Constants for expression type.
-const (
- QueryExpressionTypeSQL QueryExpressionType = "SQL"
-)
-
-// JSONType determines json input serialization type.
-type JSONType string
-
-// Constants for JSONTypes.
-const (
- JSONDocumentType JSONType = "Document"
- JSONLinesType = "Lines"
-)
-
-// ObjectSelectRequest - represents the input select body
-type ObjectSelectRequest struct {
- XMLName xml.Name `xml:"SelectObjectContentRequest" json:"-"`
- Expression string
- ExpressionType QueryExpressionType
- InputSerialization struct {
- CompressionType SelectCompressionType
- Parquet *struct{}
- CSV *struct {
- FileHeaderInfo CSVFileHeaderInfo
- RecordDelimiter string
- FieldDelimiter string
- QuoteCharacter string
- QuoteEscapeCharacter string
- Comments string
- }
- JSON *struct {
- Type JSONType
- }
- }
- OutputSerialization struct {
- CSV *struct {
- QuoteFields CSVQuoteFields
- RecordDelimiter string
- FieldDelimiter string
- QuoteCharacter string
- QuoteEscapeCharacter string
- }
- JSON *struct {
- RecordDelimiter string
- }
- }
- RequestProgress struct {
- Enabled bool
- }
-}
-
// ObjectIdentifier carries key name for the object to delete.
type ObjectIdentifier struct {
ObjectName string `xml:"Key"`
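For reference, a minimal, hypothetical standalone sketch of how a SelectObjectContentRequest body decodes into the ObjectSelectRequest struct being moved out of this file (the XML element names map one-to-one onto the exported fields; the import path assumes the new pkg/s3select home shown later in this diff):

```go
package main

import (
	"encoding/xml"
	"fmt"

	"github.com/minio/minio/pkg/s3select"
)

func main() {
	// A minimal CSV select request body; element names mirror the struct fields.
	body := `<?xml version="1.0" encoding="UTF-8"?>
<SelectObjectContentRequest>
  <Expression>SELECT * FROM S3Object</Expression>
  <ExpressionType>SQL</ExpressionType>
  <InputSerialization>
    <CompressionType>NONE</CompressionType>
    <CSV><FileHeaderInfo>USE</FileHeaderInfo></CSV>
  </InputSerialization>
  <OutputSerialization>
    <CSV><QuoteFields>AsNeeded</QuoteFields></CSV>
  </OutputSerialization>
</SelectObjectContentRequest>`

	var req s3select.ObjectSelectRequest
	if err := xml.Unmarshal([]byte(body), &req); err != nil {
		panic(err)
	}
	fmt.Println(req.Expression, req.InputSerialization.CSV.FileHeaderInfo)
}
```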
diff --git a/cmd/api-errors.go b/cmd/api-errors.go
index 4ba957d6d..5b86933f7 100644
--- a/cmd/api-errors.go
+++ b/cmd/api-errors.go
@@ -253,7 +253,6 @@ const (
ErrParseUnsupportedAlias
ErrParseUnsupportedSyntax
ErrParseUnknownOperator
- ErrParseInvalidPathComponent
ErrParseMissingIdentAfterAt
ErrParseUnexpectedOperator
ErrParseUnexpectedTerm
@@ -292,7 +291,6 @@ const (
ErrEvaluatorInvalidTimestampFormatPatternToken
ErrEvaluatorInvalidTimestampFormatPatternSymbol
ErrEvaluatorBindingDoesNotExist
- ErrInvalidColumnIndex
ErrMissingHeaders
ErrAdminConfigNotificationTargetsFailed
ErrAdminProfilerNotEnabled
@@ -1223,11 +1221,6 @@ var errorCodeResponse = map[APIErrorCode]APIError{
Description: "The SQL expression contains an invalid operator.",
HTTPStatusCode: http.StatusBadRequest,
},
- ErrParseInvalidPathComponent: {
- Code: "ParseInvalidPathComponent",
- Description: "The SQL expression contains an invalid path component.",
- HTTPStatusCode: http.StatusBadRequest,
- },
ErrParseMissingIdentAfterAt: {
Code: "ParseMissingIdentAfterAt",
Description: "Did not find the expected identifier after the @ symbol in the SQL expression.",
@@ -1413,11 +1406,6 @@ var errorCodeResponse = map[APIErrorCode]APIError{
Description: "Time stamp format pattern contains an invalid symbol in the SQL expression.",
HTTPStatusCode: http.StatusBadRequest,
},
- ErrInvalidColumnIndex: {
- Code: "InvalidColumnIndex",
- Description: "Column index in the SQL expression is invalid.",
- HTTPStatusCode: http.StatusBadRequest,
- },
ErrEvaluatorBindingDoesNotExist: {
Code: "ErrEvaluatorBindingDoesNotExist",
Description: "A column name or a path provided does not exist in the SQL expression",
@@ -1577,8 +1565,6 @@ func toAPIErrorCode(err error) (apiErr APIErrorCode) {
apiErr = ErrParseUnsupportedSyntax
case s3select.ErrParseUnknownOperator:
apiErr = ErrParseUnknownOperator
- case s3select.ErrParseInvalidPathComponent:
- apiErr = ErrParseInvalidPathComponent
case s3select.ErrParseMissingIdentAfterAt:
apiErr = ErrParseMissingIdentAfterAt
case s3select.ErrParseUnexpectedOperator:
@@ -1651,8 +1637,6 @@ func toAPIErrorCode(err error) (apiErr APIErrorCode) {
apiErr = ErrEvaluatorInvalidTimestampFormatPatternToken
case s3select.ErrEvaluatorInvalidTimestampFormatPatternSymbol:
apiErr = ErrEvaluatorInvalidTimestampFormatPatternSymbol
- case s3select.ErrInvalidColumnIndex:
- apiErr = ErrInvalidColumnIndex
case s3select.ErrEvaluatorBindingDoesNotExist:
apiErr = ErrEvaluatorBindingDoesNotExist
case s3select.ErrMissingHeaders:
diff --git a/cmd/object-handlers.go b/cmd/object-handlers.go
index 5f3614150..71cf80faf 100644
--- a/cmd/object-handlers.go
+++ b/cmd/object-handlers.go
@@ -70,12 +70,6 @@ func setHeadGetRespHeaders(w http.ResponseWriter, reqParams url.Values) {
}
}
-// This function replaces "",'' with `` for the select parser
-func cleanExpr(expr string) string {
- r := strings.NewReplacer("\"", "`", "'", "`")
- return r.Replace(expr)
-}
-
// SelectObjectContentHandler - GET Object?select
// ----------
// This implementation of the GET operation retrieves object content based
@@ -149,8 +143,7 @@ func (api objectAPIHandlers) SelectObjectContentHandler(w http.ResponseWriter, r
writeErrorResponse(w, ErrEmptyRequestBody, r.URL)
return
}
-
- var selectReq ObjectSelectRequest
+ var selectReq s3select.ObjectSelectRequest
if err := xmlDecoder(r.Body, &selectReq, r.ContentLength); err != nil {
writeErrorResponse(w, ErrMalformedXML, r.URL)
return
@@ -179,22 +172,21 @@ func (api objectAPIHandlers) SelectObjectContentHandler(w http.ResponseWriter, r
objInfo := gr.ObjInfo
- if selectReq.InputSerialization.CompressionType == SelectCompressionGZIP {
+ if selectReq.InputSerialization.CompressionType == s3select.SelectCompressionGZIP {
if !strings.Contains(objInfo.ContentType, "gzip") {
writeErrorResponse(w, ErrInvalidDataSource, r.URL)
return
}
}
- if selectReq.InputSerialization.CompressionType == SelectCompressionBZIP {
+ if selectReq.InputSerialization.CompressionType == s3select.SelectCompressionBZIP {
if !strings.Contains(objInfo.ContentType, "bzip") {
writeErrorResponse(w, ErrInvalidDataSource, r.URL)
return
}
}
- if selectReq.InputSerialization.CompressionType == SelectCompressionNONE ||
- selectReq.InputSerialization.CompressionType == "" {
- selectReq.InputSerialization.CompressionType = SelectCompressionNONE
- if !strings.Contains(objInfo.ContentType, "text/csv") {
+ if selectReq.InputSerialization.CompressionType == "" {
+ selectReq.InputSerialization.CompressionType = s3select.SelectCompressionNONE
+ if !strings.Contains(objInfo.ContentType, "text/csv") && !strings.Contains(objInfo.ContentType, "application/json") {
writeErrorResponse(w, ErrInvalidDataSource, r.URL)
return
}
@@ -207,28 +199,45 @@ func (api objectAPIHandlers) SelectObjectContentHandler(w http.ResponseWriter, r
writeErrorResponse(w, ErrExpressionTooLong, r.URL)
return
}
- if selectReq.InputSerialization.CSV == nil || selectReq.OutputSerialization.CSV == nil {
+ if selectReq.InputSerialization.CSV == nil && selectReq.InputSerialization.JSON == nil {
writeErrorResponse(w, ErrInvalidRequestParameter, r.URL)
return
}
- if selectReq.InputSerialization.CSV.FileHeaderInfo != CSVFileHeaderInfoUse &&
- selectReq.InputSerialization.CSV.FileHeaderInfo != CSVFileHeaderInfoNone &&
- selectReq.InputSerialization.CSV.FileHeaderInfo != CSVFileHeaderInfoIgnore &&
- selectReq.InputSerialization.CSV.FileHeaderInfo != "" {
- writeErrorResponse(w, ErrInvalidFileHeaderInfo, r.URL)
- return
- }
- if selectReq.OutputSerialization.CSV.QuoteFields != CSVQuoteFieldsAlways &&
- selectReq.OutputSerialization.CSV.QuoteFields != CSVQuoteFieldsAsNeeded &&
- selectReq.OutputSerialization.CSV.QuoteFields != "" {
- writeErrorResponse(w, ErrInvalidQuoteFields, r.URL)
- return
- }
- if len(selectReq.InputSerialization.CSV.RecordDelimiter) > 2 {
+ if selectReq.OutputSerialization.CSV == nil && selectReq.OutputSerialization.JSON == nil {
writeErrorResponse(w, ErrInvalidRequestParameter, r.URL)
return
}
+ if selectReq.InputSerialization.CSV != nil {
+ if selectReq.InputSerialization.CSV.FileHeaderInfo != s3select.CSVFileHeaderInfoUse &&
+ selectReq.InputSerialization.CSV.FileHeaderInfo != s3select.CSVFileHeaderInfoNone &&
+ selectReq.InputSerialization.CSV.FileHeaderInfo != s3select.CSVFileHeaderInfoIgnore &&
+ selectReq.InputSerialization.CSV.FileHeaderInfo != "" {
+ writeErrorResponse(w, ErrInvalidFileHeaderInfo, r.URL)
+ return
+ }
+ if selectReq.OutputSerialization.CSV != nil &&
+ selectReq.OutputSerialization.CSV.QuoteFields != s3select.CSVQuoteFieldsAlways &&
+ selectReq.OutputSerialization.CSV.QuoteFields != s3select.CSVQuoteFieldsAsNeeded &&
+ selectReq.OutputSerialization.CSV.QuoteFields != "" {
+ writeErrorResponse(w, ErrInvalidQuoteFields, r.URL)
+ return
+ }
+ if len(selectReq.InputSerialization.CSV.RecordDelimiter) > 2 {
+ writeErrorResponse(w, ErrInvalidRequestParameter, r.URL)
+ return
+ }
+
+ }
+ if selectReq.InputSerialization.JSON != nil {
+ if selectReq.InputSerialization.JSON.Type != s3select.JSONTypeDocument &&
+ selectReq.InputSerialization.JSON.Type != s3select.JSONLinesType &&
+ selectReq.InputSerialization.JSON.Type != "" {
+ writeErrorResponse(w, ErrInvalidJSONType, r.URL)
+ return
+ }
+
+ }
+
// Set encryption response headers
if objectAPI.IsEncryptionSupported() {
if crypto.IsEncrypted(objInfo.UserDefined) {
@@ -242,44 +251,23 @@ func (api objectAPIHandlers) SelectObjectContentHandler(w http.ResponseWriter, r
}
}
- //s3select //Options
- if selectReq.OutputSerialization.CSV.FieldDelimiter == "" {
- selectReq.OutputSerialization.CSV.FieldDelimiter = ","
+ s3s, err := s3select.New(gr, objInfo.Size, selectReq)
+ if err != nil {
+ writeErrorResponse(w, toAPIErrorCode(err), r.URL)
+ return
}
- if selectReq.InputSerialization.CSV.FileHeaderInfo == "" {
- selectReq.InputSerialization.CSV.FileHeaderInfo = CSVFileHeaderInfoNone
+
+ // Parses the select query and checks for an error
+ _, _, _, _, _, _, err = s3select.ParseSelect(s3s)
+ if err != nil {
+ writeErrorResponse(w, toAPIErrorCode(err), r.URL)
+ return
}
- if selectReq.InputSerialization.CSV.RecordDelimiter == "" {
- selectReq.InputSerialization.CSV.RecordDelimiter = "\n"
- }
- if selectReq.InputSerialization.CSV != nil {
- options := &s3select.Options{
- HasHeader: selectReq.InputSerialization.CSV.FileHeaderInfo != CSVFileHeaderInfoNone,
- RecordDelimiter: selectReq.InputSerialization.CSV.RecordDelimiter,
- FieldDelimiter: selectReq.InputSerialization.CSV.FieldDelimiter,
- Comments: selectReq.InputSerialization.CSV.Comments,
- Name: "S3Object", // Default table name for all objects
- ReadFrom: gr,
- Compressed: string(selectReq.InputSerialization.CompressionType),
- Expression: cleanExpr(selectReq.Expression),
- OutputFieldDelimiter: selectReq.OutputSerialization.CSV.FieldDelimiter,
- StreamSize: objInfo.Size,
- HeaderOpt: selectReq.InputSerialization.CSV.FileHeaderInfo == CSVFileHeaderInfoUse,
- Progress: selectReq.RequestProgress.Enabled,
- }
- s3s, err := s3select.NewInput(options)
- if err != nil {
- writeErrorResponse(w, toAPIErrorCode(err), r.URL)
- return
- }
- _, _, _, _, _, _, err = s3s.ParseSelect(options.Expression)
- if err != nil {
- writeErrorResponse(w, toAPIErrorCode(err), r.URL)
- return
- }
- if err = s3s.Execute(w); err != nil {
- logger.LogIf(ctx, err)
- }
+
+ // Execute the query on the data set.
+ if err = s3select.Execute(w, s3s); err != nil {
+ logger.LogIf(ctx, err)
}
for k, v := range objInfo.UserDefined {
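The handler now reduces to a three-step flow against the new package API. A condensed sketch of the same calls (error responses elided; as in the handler, the ParseSelect return values are discarded since it is called only to surface parse errors early):

```go
// Bind the object stream and its size to the decoded select request.
s3s, err := s3select.New(gr, objInfo.Size, selectReq)
if err != nil {
	return err
}
// Validate the SQL expression up front; parse errors are reported here.
if _, _, _, _, _, _, err = s3select.ParseSelect(s3s); err != nil {
	return err
}
// Stream records, progress and stats messages to the response writer.
return s3select.Execute(w, s3s)
```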
diff --git a/pkg/s3select/datatypes.go b/pkg/s3select/datatypes.go
new file mode 100644
index 000000000..a7834044f
--- /dev/null
+++ b/pkg/s3select/datatypes.go
@@ -0,0 +1,110 @@
+/*
+ * Minio Cloud Storage, (C) 2018 Minio, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package s3select
+
+import (
+ "encoding/xml"
+)
+
+// CSVFileHeaderInfo - can be either USE, IGNORE or NONE; defines what to do
+// with the first row.
+type CSVFileHeaderInfo string
+
+// Constants for file header info.
+const (
+ CSVFileHeaderInfoNone CSVFileHeaderInfo = "NONE"
+ CSVFileHeaderInfoIgnore = "IGNORE"
+ CSVFileHeaderInfoUse = "USE"
+)
+
+// MaxCharsPerRecord - the maximum number of characters allowed per record (1 MB).
+const (
+ MaxCharsPerRecord = 1000000
+)
+
+// SelectCompressionType - currently only GZIP and BZIP2 are supported.
+type SelectCompressionType string
+
+// JSONType determines json input serialization type.
+type JSONType string
+
+// Constants for compression types under select API.
+const (
+ SelectCompressionNONE SelectCompressionType = "NONE"
+ SelectCompressionGZIP = "GZIP"
+ SelectCompressionBZIP = "BZIP2"
+)
+
+// CSVQuoteFields - Can be either Always or AsNeeded
+type CSVQuoteFields string
+
+// Constants for csv quote styles.
+const (
+ CSVQuoteFieldsAlways CSVQuoteFields = "Always"
+ CSVQuoteFieldsAsNeeded = "AsNeeded"
+)
+
+// QueryExpressionType - Currently can only be SQL
+type QueryExpressionType string
+
+// Constants for expression type.
+const (
+ QueryExpressionTypeSQL QueryExpressionType = "SQL"
+)
+
+// Constants for JSONTypes.
+const (
+ JSONTypeDocument JSONType = "DOCUMENT"
+ JSONLinesType = "LINES"
+)
+
+// ObjectSelectRequest - represents the input select body
+type ObjectSelectRequest struct {
+ XMLName xml.Name `xml:"SelectObjectContentRequest" json:"-"`
+ Expression string
+ ExpressionType QueryExpressionType
+ InputSerialization struct {
+ CompressionType SelectCompressionType
+ Parquet *struct{}
+ CSV *struct {
+ FileHeaderInfo CSVFileHeaderInfo
+ RecordDelimiter string
+ FieldDelimiter string
+ QuoteCharacter string
+ QuoteEscapeCharacter string
+ Comments string
+ }
+ JSON *struct {
+ Type JSONType
+ }
+ }
+ OutputSerialization struct {
+ CSV *struct {
+ QuoteFields CSVQuoteFields
+ RecordDelimiter string
+ FieldDelimiter string
+ QuoteCharacter string
+ QuoteEscapeCharacter string
+ }
+ JSON *struct {
+ RecordDelimiter string
+ }
+ }
+ RequestProgress struct {
+ Enabled bool
+ }
+}
diff --git a/pkg/s3select/errors.go b/pkg/s3select/errors.go
index bb66d6110..c9ec7028b 100644
--- a/pkg/s3select/errors.go
+++ b/pkg/s3select/errors.go
@@ -16,7 +16,11 @@
package s3select
-import "errors"
+import (
+ "errors"
+
+ "github.com/minio/minio/pkg/s3select/format"
+)
//S3 errors below
@@ -35,10 +39,6 @@ var ErrExpressionTooLong = errors.New("The SQL expression is too long: The maxim
// in the SQL function.
var ErrIllegalSQLFunctionArgument = errors.New("Illegal argument was used in the SQL function")
-// ErrInvalidColumnIndex is an error if you provide a column index which is not
-// valid.
-var ErrInvalidColumnIndex = errors.New("Column index in the SQL expression is invalid")
-
// ErrInvalidKeyPath is an error if you provide a key in the SQL expression that
// is invalid.
var ErrInvalidKeyPath = errors.New("Key path in the SQL expression is invalid")
@@ -63,10 +63,6 @@ var ErrMissingHeaders = errors.New("Some headers in the query are missing from t
// utilized with the select object query.
var ErrInvalidCompressionFormat = errors.New("The file is not in a supported compression format. Only GZIP is supported at this time")
-// ErrTruncatedInput is an error if the object is not compressed properly and an
-// error occurs during decompression.
-var ErrTruncatedInput = errors.New("Object decompression failed. Check that the object is properly compressed using the format specified in the request")
-
// ErrInvalidFileHeaderInfo is an error if the argument provided to the
// FileHeader Argument is incorrect.
var ErrInvalidFileHeaderInfo = errors.New("The FileHeaderInfo is invalid. Only NONE, USE, and IGNORE are supported")
@@ -83,13 +79,6 @@ var ErrInvalidQuoteFields = errors.New("The QuoteFields is invalid. Only ALWAYS
// request element is not valid.
var ErrInvalidRequestParameter = errors.New("The value of a parameter in Request element is invalid. Check the service API documentation and try again")
-// ErrCSVParsingError is an error if the CSV presents an error while being
-// parsed.
-var ErrCSVParsingError = errors.New("Encountered an Error parsing the CSV file. Check the file and try again")
-
-// ErrJSONParsingError is an error if while parsing the JSON an error arises.
-var ErrJSONParsingError = errors.New("Encountered an error parsing the JSON file. Check the file and try again")
-
// ErrExternalEvalException is an error that arises if the query can not be
// evaluated.
var ErrExternalEvalException = errors.New("The query cannot be evaluated. Check the file and try again")
@@ -224,10 +213,6 @@ var ErrParseUnsupportedSyntax = errors.New("The SQL expression contains unsuppor
// operator present in the SQL expression.
var ErrParseUnknownOperator = errors.New("The SQL expression contains an invalid operator")
-// ErrParseInvalidPathComponent is an error that occurs if there is an invalid
-// path component.
-var ErrParseInvalidPathComponent = errors.New("The SQL expression contains an invalid path component")
-
// ErrParseMissingIdentAfterAt is an error that occurs if the wrong symbol
// follows the "@" symbol in the SQL expression.
var ErrParseMissingIdentAfterAt = errors.New("Did not find the expected identifier after the @ symbol in the SQL expression")
@@ -395,20 +380,20 @@ var errorCodeResponse = map[error]string{
ErrUnauthorizedAccess: "UnauthorizedAccess",
ErrExpressionTooLong: "ExpressionTooLong",
ErrIllegalSQLFunctionArgument: "IllegalSqlFunctionArgument",
- ErrInvalidColumnIndex: "InvalidColumnIndex",
+ format.ErrInvalidColumnIndex: "InvalidColumnIndex",
ErrInvalidKeyPath: "InvalidKeyPath",
ErrColumnTooLong: "ColumnTooLong",
ErrOverMaxColumn: "OverMaxColumn",
ErrOverMaxRecordSize: "OverMaxRecordSize",
ErrMissingHeaders: "MissingHeaders",
ErrInvalidCompressionFormat: "InvalidCompressionFormat",
- ErrTruncatedInput: "TruncatedInput",
+ format.ErrTruncatedInput: "TruncatedInput",
ErrInvalidFileHeaderInfo: "InvalidFileHeaderInfo",
ErrInvalidJSONType: "InvalidJsonType",
ErrInvalidQuoteFields: "InvalidQuoteFields",
ErrInvalidRequestParameter: "InvalidRequestParameter",
- ErrCSVParsingError: "CSVParsingError",
- ErrJSONParsingError: "JSONParsingError",
+ format.ErrCSVParsingError: "CSVParsingError",
+ format.ErrJSONParsingError: "JSONParsingError",
ErrExternalEvalException: "ExternalEvalException",
ErrInvalidDataType: "InvalidDataType",
ErrUnrecognizedFormatException: "UnrecognizedFormatException",
@@ -443,7 +428,7 @@ var errorCodeResponse = map[error]string{
ErrParseUnsupportedAlias: "ParseUnsupportedAlias",
ErrParseUnsupportedSyntax: "ParseUnsupportedSyntax",
ErrParseUnknownOperator: "ParseUnknownOperator",
- ErrParseInvalidPathComponent: "ParseInvalidPathComponent",
+ format.ErrParseInvalidPathComponent: "ParseInvalidPathComponent",
ErrParseMissingIdentAfterAt: "ParseMissingIdentAfterAt",
ErrParseUnexpectedOperator: "ParseUnexpectedOperator",
ErrParseUnexpectedTerm: "ParseUnexpectedTerm",
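The net effect of these remappings: errors that moved into the shared format package still resolve to the same S3 error code strings. A small illustrative helper over the errorCodeResponse map edited above (the helper name and "InternalError" fallback are hypothetical; the map itself is package-private):

```go
// selectErrorCode resolves a select/format error to its S3 error code string.
func selectErrorCode(err error) string {
	if code, ok := errorCodeResponse[err]; ok {
		return code
	}
	return "InternalError" // illustrative fallback only
}

// selectErrorCode(format.ErrCSVParsingError) == "CSVParsingError"
```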
diff --git a/pkg/s3select/funcEval.go b/pkg/s3select/evaluate.go
similarity index 75%
rename from pkg/s3select/funcEval.go
rename to pkg/s3select/evaluate.go
index e299e8601..047c8b7f1 100644
--- a/pkg/s3select/funcEval.go
+++ b/pkg/s3select/evaluate.go
@@ -19,12 +19,13 @@ package s3select
import (
"strings"
+ "github.com/minio/minio/pkg/s3select/format"
"github.com/xwb1989/sqlparser"
)
// stringOps is a function which handles the case in a clause if there is a need
// to perform a string function
-func stringOps(myFunc *sqlparser.FuncExpr, record []string, myReturnVal string, columnsMap map[string]int) string {
+func stringOps(myFunc *sqlparser.FuncExpr, record string, myReturnVal string) string {
var value string
funcName := myFunc.Name.CompliantName()
switch tempArg := myFunc.Exprs[0].(type) {
@@ -34,7 +35,7 @@ func stringOps(myFunc *sqlparser.FuncExpr, record []string, myReturnVal string,
// myReturnVal is actually the tail recursive value being used in the eval func.
return applyStrFunc(myReturnVal, funcName)
case *sqlparser.ColName:
- value = applyStrFunc(record[columnsMap[col.Name.CompliantName()]], funcName)
+ value = applyStrFunc(jsonValue(col.Name.CompliantName(), record), funcName)
case *sqlparser.SQLVal:
value = applyStrFunc(string(col.Val), funcName)
}
@@ -43,7 +44,7 @@ func stringOps(myFunc *sqlparser.FuncExpr, record []string, myReturnVal string,
}
// coalOps is a function which decomposes a COALESCE func expr into its struct.
-func coalOps(myFunc *sqlparser.FuncExpr, record []string, myReturnVal string, columnsMap map[string]int) string {
+func coalOps(myFunc *sqlparser.FuncExpr, record string, myReturnVal string) string {
myArgs := make([]string, len(myFunc.Exprs))
for i := 0; i < len(myFunc.Exprs); i++ {
@@ -54,7 +55,7 @@ func coalOps(myFunc *sqlparser.FuncExpr, record []string, myReturnVal string, co
// myReturnVal is actually the tail recursive value being used in the eval func.
return myReturnVal
case *sqlparser.ColName:
- myArgs[i] = record[columnsMap[col.Name.CompliantName()]]
+ myArgs[i] = jsonValue(col.Name.CompliantName(), record)
case *sqlparser.SQLVal:
myArgs[i] = string(col.Val)
}
@@ -64,7 +65,7 @@ func coalOps(myFunc *sqlparser.FuncExpr, record []string, myReturnVal string, co
}
// nullOps is a function which decomposes a NullIf func expr into its struct.
-func nullOps(myFunc *sqlparser.FuncExpr, record []string, myReturnVal string, columnsMap map[string]int) string {
+func nullOps(myFunc *sqlparser.FuncExpr, record string, myReturnVal string) string {
myArgs := make([]string, 2)
for i := 0; i < len(myFunc.Exprs); i++ {
@@ -74,7 +75,7 @@ func nullOps(myFunc *sqlparser.FuncExpr, record []string, myReturnVal string, co
case *sqlparser.FuncExpr:
return myReturnVal
case *sqlparser.ColName:
- myArgs[i] = record[columnsMap[col.Name.CompliantName()]]
+ myArgs[i] = jsonValue(col.Name.CompliantName(), record)
case *sqlparser.SQLVal:
myArgs[i] = string(col.Val)
}
@@ -118,8 +119,8 @@ func processCoalNoIndex(coalStore []string) string {
}
// evaluateFuncExpr is a function that allows for tail recursive evaluation of
-// nested function expressions.
-func evaluateFuncExpr(myVal *sqlparser.FuncExpr, myReturnVal string, myRecord []string, columnsMap map[string]int) string {
+// nested function expressions
+func evaluateFuncExpr(myVal *sqlparser.FuncExpr, myReturnVal string, myRecord string) string {
if myVal == nil {
return myReturnVal
}
@@ -140,26 +141,26 @@ func evaluateFuncExpr(myVal *sqlparser.FuncExpr, myReturnVal string, myRecord []
for i := 0; i < len(mySubFunc); i++ {
if supportedString(myVal.Name.CompliantName()) {
if mySubFunc != nil {
- return stringOps(myVal, myRecord, evaluateFuncExpr(mySubFunc[i], myReturnVal, myRecord, columnsMap), columnsMap)
+ return stringOps(myVal, myRecord, evaluateFuncExpr(mySubFunc[i], myReturnVal, myRecord))
}
- return stringOps(myVal, myRecord, myReturnVal, columnsMap)
+ return stringOps(myVal, myRecord, myReturnVal)
} else if strings.ToUpper(myVal.Name.CompliantName()) == "NULLIF" {
if mySubFunc != nil {
- return nullOps(myVal, myRecord, evaluateFuncExpr(mySubFunc[i], myReturnVal, myRecord, columnsMap), columnsMap)
+ return nullOps(myVal, myRecord, evaluateFuncExpr(mySubFunc[i], myReturnVal, myRecord))
}
- return nullOps(myVal, myRecord, myReturnVal, columnsMap)
+ return nullOps(myVal, myRecord, myReturnVal)
} else if strings.ToUpper(myVal.Name.CompliantName()) == "COALESCE" {
if mySubFunc != nil {
- return coalOps(myVal, myRecord, evaluateFuncExpr(mySubFunc[i], myReturnVal, myRecord, columnsMap), columnsMap)
+ return coalOps(myVal, myRecord, evaluateFuncExpr(mySubFunc[i], myReturnVal, myRecord))
}
- return coalOps(myVal, myRecord, myReturnVal, columnsMap)
+ return coalOps(myVal, myRecord, myReturnVal)
}
}
return ""
}
// evaluateFuncErr is a function that flags errors in nested functions.
-func (reader *Input) evaluateFuncErr(myVal *sqlparser.FuncExpr) error {
+func evaluateFuncErr(myVal *sqlparser.FuncExpr, reader format.Select) error {
if myVal == nil {
return nil
}
@@ -173,11 +174,11 @@ func (reader *Input) evaluateFuncErr(myVal *sqlparser.FuncExpr) error {
case *sqlparser.AliasedExpr:
switch col := tempArg.Expr.(type) {
case *sqlparser.FuncExpr:
- if err := reader.evaluateFuncErr(col); err != nil {
+ if err := evaluateFuncErr(col, reader); err != nil {
return err
}
case *sqlparser.ColName:
- if err := reader.colNameErrs([]string{col.Name.CompliantName()}); err != nil {
+ if err := reader.ColNameErrs([]string{col.Name.CompliantName()}); err != nil {
return err
}
}
@@ -186,11 +187,9 @@ func (reader *Input) evaluateFuncErr(myVal *sqlparser.FuncExpr) error {
return nil
}
-// evaluateIsExpr is a function for evaluating expressions of the form "column
-// is ...."
-func evaluateIsExpr(myFunc *sqlparser.IsExpr, row []string, columnNames map[string]int, alias string) (bool, error) {
+// evaluateIsExpr is a function for evaluating expressions of the form "column is ...."
+func evaluateIsExpr(myFunc *sqlparser.IsExpr, row string, alias string) (bool, error) {
operator := myFunc.Operator
- var colName string
var myVal string
switch myIs := myFunc.Expr.(type) {
// case for literal val
@@ -198,14 +197,10 @@ func evaluateIsExpr(myFunc *sqlparser.IsExpr, row []string, columnNames map[stri
myVal = string(myIs.Val)
// case for nested func val
case *sqlparser.FuncExpr:
- myVal = evaluateFuncExpr(myIs, "", row, columnNames)
+ myVal = evaluateFuncExpr(myIs, "", row)
// case for col val
case *sqlparser.ColName:
- colName = cleanCol(myIs.Name.CompliantName(), alias)
- }
- // case if it is a col val
- if colName != "" {
- myVal = row[columnNames[colName]]
+ myVal = jsonValue(myIs.Name.CompliantName(), row)
}
// case to evaluate is null
if strings.ToLower(operator) == "is null" {
@@ -221,11 +216,11 @@ func evaluateIsExpr(myFunc *sqlparser.IsExpr, row []string, columnNames map[stri
// supportedString is a function that checks whether the function is a supported
// string one
func supportedString(strFunc string) bool {
- return stringInSlice(strings.ToUpper(strFunc), []string{"TRIM", "SUBSTRING", "CHAR_LENGTH", "CHARACTER_LENGTH", "LOWER", "UPPER"})
+ return format.StringInSlice(strings.ToUpper(strFunc), []string{"TRIM", "SUBSTRING", "CHAR_LENGTH", "CHARACTER_LENGTH", "LOWER", "UPPER"})
}
// supportedFunc is a function that checks whether the function is a supported
// S3 one.
func supportedFunc(strFunc string) bool {
- return stringInSlice(strings.ToUpper(strFunc), []string{"TRIM", "SUBSTRING", "CHAR_LENGTH", "CHARACTER_LENGTH", "LOWER", "UPPER", "COALESCE", "NULLIF"})
+ return format.StringInSlice(strings.ToUpper(strFunc), []string{"TRIM", "SUBSTRING", "CHAR_LENGTH", "CHARACTER_LENGTH", "LOWER", "UPPER", "COALESCE", "NULLIF"})
}
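Throughout this rename, records change from a []string plus columnsMap into a single JSON-encoded string, and column lookups go through jsonValue, which is defined elsewhere in the package. A minimal sketch of the lookup behavior these call sites rely on, using gjson as helpers.go now does (jsonValueSketch is a stand-in name, not the real helper):

```go
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

// jsonValueSketch resolves a column name against a JSON-encoded record,
// the way the rewritten stringOps/coalOps/nullOps call sites expect.
func jsonValueSketch(col, record string) string {
	return gjson.Get(record, col).String()
}

func main() {
	record := `{"name":"alice","city":"berlin"}`
	fmt.Println(jsonValueSketch("name", record)) // alice
}
```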
diff --git a/pkg/s3select/format/csv/csv.go b/pkg/s3select/format/csv/csv.go
new file mode 100644
index 000000000..431804676
--- /dev/null
+++ b/pkg/s3select/format/csv/csv.go
@@ -0,0 +1,334 @@
+/*
+ * Minio Cloud Storage, (C) 2018 Minio, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package csv
+
+import (
+ "compress/bzip2"
+ "encoding/csv"
+ "encoding/xml"
+ "io"
+ "strconv"
+ "strings"
+
+ gzip "github.com/klauspost/pgzip"
+ "github.com/minio/minio/pkg/ioutil"
+ "github.com/minio/minio/pkg/s3select/format"
+)
+
+// Options are passed to the underlying encoding/csv reader.
+type Options struct {
+ // HasHeader when true, will treat the first row as a header row.
+ HasHeader bool
+
+ // RecordDelimiter is the string that records are delimited by.
+ RecordDelimiter string
+
+ // FieldDelimiter is the string that fields are delimited by.
+ FieldDelimiter string
+
+ // Comments is the character that, when it appears as the first character
+ // of a line, marks that line as a comment.
+ Comments string
+
+ // Name of the table that is used for querying
+ Name string
+
+ // ReadFrom is where the data will be read from.
+ ReadFrom io.Reader
+
+ // Compressed tells whether a gzip or bzip2 reader is needed
+ // to extract the csv.
+ Compressed string
+
+ // SQL expression meant to be evaluated.
+ Expression string
+
+ // What the output CSV will be delimited by.
+ OutputFieldDelimiter string
+
+ // Size of incoming object
+ StreamSize int64
+
+ // HeaderOpt is true when FileHeaderInfo is "USE".
+ HeaderOpt bool
+
+ // Progress enabled, enable/disable progress messages.
+ Progress bool
+}
+
+// cinput represents a record producing input from a formatted object.
+type cinput struct {
+ options *Options
+ reader *csv.Reader
+ firstRow []string
+ header []string
+ minOutputLength int
+ stats struct {
+ BytesScanned int64
+ BytesReturned int64
+ BytesProcessed int64
+ }
+}
+
+// New sets up a new Input; the first row is read when this is run.
+// If there is a problem with reading the first row, the error is returned.
+// Otherwise, the returned reader can be reliably consumed with Read()
+// until Read() returns an error.
+func New(opts *Options) (format.Select, error) {
+ myReader := opts.ReadFrom
+ var tempBytesScanned int64
+ tempBytesScanned = 0
+ switch opts.Compressed {
+ case "GZIP":
+ tempBytesScanned = opts.StreamSize
+ var err error
+ if myReader, err = gzip.NewReader(opts.ReadFrom); err != nil {
+ return nil, format.ErrTruncatedInput
+ }
+ case "BZIP2":
+ tempBytesScanned = opts.StreamSize
+ myReader = bzip2.NewReader(opts.ReadFrom)
+ }
+
+ // DelimitedReader treats custom record delimiter like `\r\n`,`\r`,`ab` etc and replaces it with `\n`.
+ normalizedReader := ioutil.NewDelimitedReader(myReader, []rune(opts.RecordDelimiter))
+ reader := &cinput{
+ options: opts,
+ reader: csv.NewReader(normalizedReader),
+ }
+ reader.stats.BytesScanned = tempBytesScanned
+ reader.stats.BytesProcessed = 0
+ reader.stats.BytesReturned = 0
+
+ reader.firstRow = nil
+
+ reader.reader.FieldsPerRecord = -1
+ if reader.options.FieldDelimiter != "" {
+ reader.reader.Comma = rune(reader.options.FieldDelimiter[0])
+ }
+
+ if reader.options.Comments != "" {
+ reader.reader.Comment = rune(reader.options.Comments[0])
+ }
+
+ // QuoteCharacter - " (defaulted currently)
+ reader.reader.LazyQuotes = true
+
+ if err := reader.readHeader(); err != nil {
+ return nil, err
+ }
+
+ return reader, nil
+}
+
+// cleanHeader replaces the spaces in column names with underscores.
+func cleanHeader(columns []string) []string {
+ for i := 0; i < len(columns); i++ {
+ columns[i] = strings.Replace(columns[i], " ", "_", -1)
+ }
+ return columns
+}
+
+// readHeader reads the header into the header variable if the header is present
+// as the first row of the csv
+func (reader *cinput) readHeader() error {
+ var readErr error
+ if reader.options.HasHeader {
+ reader.firstRow, readErr = reader.reader.Read()
+ if readErr != nil {
+ return format.ErrCSVParsingError
+ }
+ reader.header = cleanHeader(reader.firstRow)
+ reader.firstRow = nil
+ reader.minOutputLength = len(reader.header)
+ } else {
+ reader.firstRow, readErr = reader.reader.Read()
+ reader.header = make([]string, len(reader.firstRow))
+ reader.minOutputLength = len(reader.firstRow)
+ // Without a header row, synthesize positional column names.
+ for i := 0; i < reader.minOutputLength; i++ {
+ reader.header[i] = strconv.Itoa(i)
+ }
+
+ }
+ return nil
+}
+
+// Progress - returns true if progress messages were requested.
+func (reader *cinput) Progress() bool {
+ return reader.options.Progress
+}
+
+// UpdateBytesProcessed - updates the bytes processed count.
+func (reader *cinput) UpdateBytesProcessed(record map[string]interface{}) {
+ // Convert map to slice of values.
+ values := []string{}
+ for _, value := range record {
+ values = append(values, value.(string))
+ }
+ // Account for the field bytes plus delimiters, not just the field count.
+ reader.stats.BytesProcessed += format.ProcessSize(values)
+}
+
+// Read reads a record from the file and returns it as a map[string]interface{}.
+func (reader *cinput) Read() (map[string]interface{}, error) {
+ record := make(map[string]interface{})
+ dec := reader.readRecord()
+ if dec != nil {
+ if reader.options.HasHeader {
+ columns := reader.header
+ for i, value := range dec {
+ record[columns[i]] = value
+ }
+ } else {
+ for i, value := range dec {
+ record["_"+strconv.Itoa(i)] = value
+ }
+ }
+ return record, nil
+ }
+ return nil, nil
+}
+
+// OutputFieldDelimiter - returns the output field delimiter specified in the input request.
+func (reader *cinput) OutputFieldDelimiter() string {
+ return reader.options.OutputFieldDelimiter
+}
+
+// HasHeader - returns whether the CSV input has a header row.
+func (reader *cinput) HasHeader() bool {
+ return reader.options.HasHeader
+}
+
+// Expression - returns the select expression for the query.
+func (reader *cinput) Expression() string {
+ return reader.options.Expression
+}
+
+// UpdateBytesReturned - updates the bytes returned count.
+func (reader *cinput) UpdateBytesReturned(size int64) {
+ reader.stats.BytesReturned += size
+}
+
+// Header returns the header of the reader: either the first row, if a header
+// is set in the options, or the column number as a string, starting with "0".
+func (reader *cinput) Header() []string {
+ return reader.header
+}
+
+// readRecord reads a single record from the stream and always returns successfully.
+// If the record is empty, an empty []string is returned.
+// Records expand to match the current row size, adding blank fields as needed.
+// Records never contain fewer fields than the first row.
+// Returns nil on EOF.
+// In the event of a parse error due to an invalid record, an empty []string
+// with the number of fields in the first row is returned, as if the record
+// were empty.
+//
+// In general, this reader is very tolerant of problems.
+func (reader *cinput) readRecord() []string {
+ var row []string
+ var fileErr error
+
+ if reader.firstRow != nil {
+ row = reader.firstRow
+ reader.firstRow = nil
+ return row
+ }
+
+ row, fileErr = reader.reader.Read()
+ emptysToAppend := reader.minOutputLength - len(row)
+ if fileErr == io.EOF || fileErr == io.ErrClosedPipe {
+ return nil
+ } else if _, ok := fileErr.(*csv.ParseError); ok {
+ emptysToAppend = reader.minOutputLength
+ }
+
+ if emptysToAppend > 0 {
+ for counter := 0; counter < emptysToAppend; counter++ {
+ row = append(row, "")
+ }
+ }
+
+ return row
+}
+
+// CreateStatXML is the function which marshals the stats struct into XML so
+// that the stats message can be sent.
+func (reader *cinput) CreateStatXML() (string, error) {
+ if reader.options.Compressed == "NONE" {
+ reader.stats.BytesProcessed = reader.options.StreamSize
+ reader.stats.BytesScanned = reader.stats.BytesProcessed
+ }
+ out, err := xml.Marshal(&format.Stats{
+ BytesScanned: reader.stats.BytesScanned,
+ BytesProcessed: reader.stats.BytesProcessed,
+ BytesReturned: reader.stats.BytesReturned,
+ })
+ if err != nil {
+ return "", err
+ }
+ return xml.Header + string(out), nil
+}
+
+// CreateProgressXML is the function which marshals the progress struct into
+// XML so that the progress message can be sent.
+func (reader *cinput) CreateProgressXML() (string, error) {
+ if reader.options.HasHeader {
+ reader.stats.BytesProcessed += format.ProcessSize(reader.header)
+ }
+ if reader.options.Compressed == "NONE" {
+ reader.stats.BytesScanned = reader.stats.BytesProcessed
+ }
+ out, err := xml.Marshal(&format.Progress{
+ BytesScanned: reader.stats.BytesScanned,
+ BytesProcessed: reader.stats.BytesProcessed,
+ BytesReturned: reader.stats.BytesReturned,
+ })
+ if err != nil {
+ return "", err
+ }
+ return xml.Header + string(out), nil
+}
+
+// Type - returns the data format type.
+func (reader *cinput) Type() format.Type {
+ return format.CSV
+}
+
+// ColNameErrs is a function which makes sure that the requested headers are
+// present in the file; otherwise it returns an error.
+func (reader *cinput) ColNameErrs(columnNames []string) error {
+ for i := 0; i < len(columnNames); i++ {
+ if columnNames[i] == "" {
+ continue
+ }
+ if !format.IsInt(columnNames[i]) && !reader.options.HeaderOpt {
+ return format.ErrInvalidColumnIndex
+ }
+ if format.IsInt(columnNames[i]) {
+ tempInt, _ := strconv.Atoi(columnNames[i])
+ if tempInt > len(reader.Header()) || tempInt == 0 {
+ return format.ErrInvalidColumnIndex
+ }
+ } else {
+ if reader.options.HeaderOpt && !format.StringInSlice(columnNames[i], reader.Header()) {
+ return format.ErrParseInvalidPathComponent
+ }
+ }
+ }
+ return nil
+}
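A minimal usage sketch of the new CSV reader, wiring Options the way the handler does (field values here are illustrative):

```go
package main

import (
	"fmt"
	"strings"

	csvFormat "github.com/minio/minio/pkg/s3select/format/csv"
)

func main() {
	data := "name,city\nalice,berlin\n"
	reader, err := csvFormat.New(&csvFormat.Options{
		HasHeader:       true,
		RecordDelimiter: "\n",
		FieldDelimiter:  ",",
		Name:            "S3Object",
		ReadFrom:        strings.NewReader(data),
		Compressed:      "NONE",
		Expression:      "SELECT * FROM S3Object",
		StreamSize:      int64(len(data)),
		HeaderOpt:       true,
	})
	if err != nil {
		panic(err)
	}
	// With HasHeader set, the first row becomes the record keys.
	rec, _ := reader.Read()
	fmt.Println(rec["name"], rec["city"]) // alice berlin
}
```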
diff --git a/pkg/s3select/format/errors.go b/pkg/s3select/format/errors.go
new file mode 100644
index 000000000..598a0efa5
--- /dev/null
+++ b/pkg/s3select/format/errors.go
@@ -0,0 +1,38 @@
+/*
+ * Minio Cloud Storage, (C) 2018 Minio, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package format
+
+import "errors"
+
+// ErrTruncatedInput is an error if the object is not compressed properly and an
+// error occurs during decompression.
+var ErrTruncatedInput = errors.New("Object decompression failed. Check that the object is properly compressed using the format specified in the request")
+
+// ErrCSVParsingError is an error if the CSV presents an error while being
+// parsed.
+var ErrCSVParsingError = errors.New("Encountered an Error parsing the CSV file. Check the file and try again")
+
+// ErrInvalidColumnIndex is an error if you provide a column index which is not
+// valid.
+var ErrInvalidColumnIndex = errors.New("Column index in the SQL expression is invalid")
+
+// ErrParseInvalidPathComponent is an error that occurs if there is an invalid
+// path component.
+var ErrParseInvalidPathComponent = errors.New("The SQL expression contains an invalid path component")
+
+// ErrJSONParsingError is an error if while parsing the JSON an error arises.
+var ErrJSONParsingError = errors.New("Encountered an error parsing the JSON file. Check the file and try again")
diff --git a/pkg/s3select/format/helpers.go b/pkg/s3select/format/helpers.go
new file mode 100644
index 000000000..a4a1ecde7
--- /dev/null
+++ b/pkg/s3select/format/helpers.go
@@ -0,0 +1,50 @@
+/*
+ * Minio Cloud Storage, (C) 2018 Minio, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package format
+
+import "strconv"
+
+// IsInt - returns whether a string can be represented as an int.
+func IsInt(s string) bool {
+ _, err := strconv.Atoi(s)
+ return err == nil
+}
+
+// StringInSlice - returns whether a string is present in a list.
+func StringInSlice(x string, list []string) bool {
+ for _, y := range list {
+ if x == y {
+ return true
+ }
+ }
+ return false
+}
+
+// ProcessSize - computes the byte size of a record, including delimiters,
+// so that BytesProcessed can be calculated.
+func ProcessSize(myrecord []string) int64 {
+ if len(myrecord) > 0 {
+ var size int64
+ size = int64(len(myrecord)-1) + 1
+ for i := range myrecord {
+ size += int64(len(myrecord[i]))
+ }
+
+ return size
+ }
+ return 0
+}
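A worked example of the size accounting: for a two-field record, ProcessSize counts one field delimiter, one record delimiter, and the field bytes themselves.

```go
package main

import (
	"fmt"

	"github.com/minio/minio/pkg/s3select/format"
)

func main() {
	// (len-1) field delimiters + 1 record delimiter + 1 + 2 field bytes = 5
	fmt.Println(format.ProcessSize([]string{"a", "bb"})) // 5
}
```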
diff --git a/pkg/s3select/format/json/json.go b/pkg/s3select/format/json/json.go
new file mode 100644
index 000000000..fab48bcf5
--- /dev/null
+++ b/pkg/s3select/format/json/json.go
@@ -0,0 +1,200 @@
+/*
+ * Minio Cloud Storage, (C) 2018 Minio, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package json
+
+import (
+ "compress/bzip2"
+ "encoding/json"
+ "encoding/xml"
+ "io"
+
+ jsoniter "github.com/json-iterator/go"
+ gzip "github.com/klauspost/pgzip"
+ "github.com/minio/minio/pkg/s3select/format"
+)
+
+// Options are passed to the underlying json decoder.
+type Options struct {
+
+ // Name of the table that is used for querying
+ Name string
+
+ // ReadFrom is where the data will be read from.
+ ReadFrom io.Reader
+
+ // Compressed tells whether a gzip or bzip2 reader is needed
+ // to extract the json.
+ Compressed string
+
+ // SQL expression meant to be evaluated.
+ Expression string
+
+ // What the output records will be delimited by.
+ RecordDelimiter string
+
+ // Size of incoming object
+ StreamSize int64
+
+ // True if Type is DOCUMENT.
+ Type bool
+
+ // Progress enabled, enable/disable progress messages.
+ Progress bool
+}
+
+// jinput represents a record producing input from a formatted file or pipe.
+type jinput struct {
+ options *Options
+ reader *jsoniter.Decoder
+ firstRow []string
+ header []string
+ minOutputLength int
+ stats struct {
+ BytesScanned int64
+ BytesReturned int64
+ BytesProcessed int64
+ }
+}
+
+// New sets up a new JSON input; if there is a problem setting up the decoder,
+// the error is returned. Otherwise, the returned reader can be reliably
+// consumed with Read() until Read() returns nil.
+func New(opts *Options) (format.Select, error) {
+ myReader := opts.ReadFrom
+ var tempBytesScanned int64
+ tempBytesScanned = 0
+ switch opts.Compressed {
+ case "GZIP":
+ tempBytesScanned = opts.StreamSize
+ var err error
+ if myReader, err = gzip.NewReader(opts.ReadFrom); err != nil {
+ return nil, format.ErrTruncatedInput
+ }
+ case "BZIP2":
+ tempBytesScanned = opts.StreamSize
+ myReader = bzip2.NewReader(opts.ReadFrom)
+ }
+
+ reader := &jinput{
+ options: opts,
+ reader: jsoniter.NewDecoder(myReader),
+ }
+ reader.stats.BytesScanned = tempBytesScanned
+ reader.stats.BytesProcessed = 0
+ reader.stats.BytesReturned = 0
+
+ return reader, nil
+}
+
+// Progress - returns true if progress messages were requested.
+func (reader *jinput) Progress() bool {
+ return reader.options.Progress
+}
+
+// UpdateBytesProcessed - updates the bytes processed count.
+func (reader *jinput) UpdateBytesProcessed(record map[string]interface{}) {
+ out, _ := json.Marshal(record)
+ reader.stats.BytesProcessed += int64(len(out))
+}
+
+// Read reads a record from the file and returns it as a map[string]interface{}.
+func (reader *jinput) Read() (map[string]interface{}, error) {
+ dec := reader.reader
+ var record interface{}
+ for {
+ err := dec.Decode(&record)
+ if err == io.EOF || err == io.ErrClosedPipe {
+ break
+ }
+ if err != nil {
+ return nil, format.ErrJSONParsingError
+ }
+ return record.(map[string]interface{}), nil
+ }
+ return nil, nil
+}
+
+// OutputFieldDelimiter - returns "," as the fixed field delimiter for JSON output.
+func (reader *jinput) OutputFieldDelimiter() string {
+ return ","
+}
+
+// HasHeader - always returns false; JSON input has no header row.
+func (reader *jinput) HasHeader() bool {
+ return false
+}
+
+// Expression - returns the select expression for the query.
+func (reader *jinput) Expression() string {
+ return reader.options.Expression
+}
+
+// UpdateBytesReturned - updates the bytes returned count.
+func (reader *jinput) UpdateBytesReturned(size int64) {
+ reader.stats.BytesReturned += size
+}
+
+// Header returns nil; JSON input carries no header row.
+func (reader *jinput) Header() []string {
+ return nil
+}
+
+// CreateStatXML is the function which marshals the stats struct into XML so
+// that the stats message can be sent.
+func (reader *jinput) CreateStatXML() (string, error) {
+ if reader.options.Compressed == "NONE" {
+ reader.stats.BytesProcessed = reader.options.StreamSize
+ reader.stats.BytesScanned = reader.stats.BytesProcessed
+ }
+ out, err := xml.Marshal(&format.Stats{
+ BytesScanned: reader.stats.BytesScanned,
+ BytesProcessed: reader.stats.BytesProcessed,
+ BytesReturned: reader.stats.BytesReturned,
+ })
+ if err != nil {
+ return "", err
+ }
+ return xml.Header + string(out), nil
+}
+
+// CreateProgressXML is the function which marshals the progress struct into
+// XML so that the progress message can be sent.
+func (reader *jinput) CreateProgressXML() (string, error) {
+ if reader.options.Compressed == "NONE" {
+ reader.stats.BytesScanned = reader.stats.BytesProcessed
+ }
+ out, err := xml.Marshal(&format.Progress{
+ BytesScanned: reader.stats.BytesScanned,
+ BytesProcessed: reader.stats.BytesProcessed,
+ BytesReturned: reader.stats.BytesReturned,
+ })
+ if err != nil {
+ return "", err
+ }
+ return xml.Header + string(out), nil
+}
+
+// Type - returns the data format type.
+func (reader *jinput) Type() format.Type {
+ return format.JSON
+}
+
+// ColNameErrs - a no-op for the JSON input type, which has no fixed columns.
+func (reader *jinput) ColNameErrs(columnNames []string) error {
+ return nil
+}
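A minimal usage sketch of the JSON reader over two newline-delimited documents; Read() returns one decoded document per call and nil at EOF:

```go
package main

import (
	"fmt"
	"strings"

	jsonFormat "github.com/minio/minio/pkg/s3select/format/json"
)

func main() {
	data := `{"name":"alice"}
{"name":"bob"}`
	reader, err := jsonFormat.New(&jsonFormat.Options{
		Name:       "S3Object",
		ReadFrom:   strings.NewReader(data),
		Compressed: "NONE",
		Expression: "SELECT * FROM S3Object",
		StreamSize: int64(len(data)),
	})
	if err != nil {
		panic(err)
	}
	for {
		rec, err := reader.Read()
		if err != nil || rec == nil {
			break
		}
		fmt.Println(rec["name"]) // alice, then bob
	}
}
```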
diff --git a/pkg/s3select/format/select.go b/pkg/s3select/format/select.go
new file mode 100644
index 000000000..38e24d581
--- /dev/null
+++ b/pkg/s3select/format/select.go
@@ -0,0 +1,63 @@
+/*
+ * Minio Cloud Storage, (C) 2018 Minio, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package format
+
+import "encoding/xml"
+
+// Select Interface helper methods, implementing features needed for
+// https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectSELECTContent.html
+type Select interface {
+ Type() Type
+ Read() (map[string]interface{}, error)
+ Header() []string
+ HasHeader() bool
+ OutputFieldDelimiter() string
+ UpdateBytesProcessed(record map[string]interface{})
+ Expression() string
+ UpdateBytesReturned(int64)
+ CreateStatXML() (string, error)
+ CreateProgressXML() (string, error)
+ ColNameErrs(columnNames []string) error
+ Progress() bool
+}
+
+// Progress represents the XML format of the progress messages.
+type Progress struct {
+ XMLName xml.Name `xml:"Progress" json:"-"`
+ BytesScanned int64 `xml:"BytesScanned"`
+ BytesProcessed int64 `xml:"BytesProcessed"`
+ BytesReturned int64 `xml:"BytesReturned"`
+}
+
+// Stats represents the XML format of the stats messages.
+type Stats struct {
+ XMLName xml.Name `xml:"Stats" json:"-"`
+ BytesScanned int64 `xml:"BytesScanned"`
+ BytesProcessed int64 `xml:"BytesProcessed"`
+ BytesReturned int64 `xml:"BytesReturned"`
+}
+
+// Type denotes the supported data format types.
+type Type string
+
+// Different data format types.
+const (
+ JSON Type = "json"
+ CSV Type = "csv"
+)
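Both format implementations emit these structs through encoding/xml; for reference, marshaling a Progress value produces the body that CreateProgressXML prefixes with xml.Header:

```go
package main

import (
	"encoding/xml"
	"fmt"

	"github.com/minio/minio/pkg/s3select/format"
)

func main() {
	out, err := xml.Marshal(&format.Progress{
		BytesScanned:   1024,
		BytesProcessed: 1024,
		BytesReturned:  512,
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
	// <Progress><BytesScanned>1024</BytesScanned><BytesProcessed>1024</BytesProcessed><BytesReturned>512</BytesReturned></Progress>
}
```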
diff --git a/pkg/s3select/helpers.go b/pkg/s3select/helpers.go
index 39bf29736..ae6375415 100644
--- a/pkg/s3select/helpers.go
+++ b/pkg/s3select/helpers.go
@@ -17,94 +17,58 @@
package s3select
import (
+ "encoding/json"
"fmt"
"math"
"reflect"
"strconv"
"strings"
+ "github.com/minio/minio/pkg/s3select/format"
+ "github.com/tidwall/gjson"
"github.com/xwb1989/sqlparser"
)
// MaxExpressionLength - 256KiB
const MaxExpressionLength = 256 * 1024
-// This function processes size so that we can calculate bytes BytesProcessed.
-func processSize(myrecord []string) int64 {
- if len(myrecord) > 0 {
- var size int64
- size = int64(len(myrecord)-1) + 1
- for i := range myrecord {
- size += int64(len(myrecord[i]))
- }
-
- return size
- }
- return 0
-}
-
-// This function finds whether a string is in a list
-func stringInSlice(x string, list []string) bool {
- for _, y := range list {
- if x == y {
- return true
- }
- }
- return false
-}
-
-// This function returns the index of a string in a list
-func stringIndex(a string, list []string) int {
- for i, v := range list {
- if v == a {
- return i
- }
- }
- return -1
-}
-
-// Returns a true or false, whether a string can be represented as an int.
-func representsInt(s string) bool {
- _, err := strconv.Atoi(s)
- return err == nil
-}
-
-// The function below processes the where clause into an acutal boolean given a
-// row
-func matchesMyWhereClause(row []string, columnNames map[string]int, alias string, whereClause interface{}) (bool, error) {
- // This particular logic deals with the details of casting, e.g if we have to
- // cast a column of string numbers into int's for comparison.
+// matchesMyWhereClause takes a map[string]interface{}, processes the where
+// clause and returns true if the row satisfies it.
+func matchesMyWhereClause(record map[string]interface{}, alias string, whereClause interface{}) (bool, error) {
var conversionColumn string
var operator string
var operand interface{}
if fmt.Sprintf("%v", whereClause) == "false" {
return false, nil
}
+ out, err := json.Marshal(record)
+ if err != nil {
+ return false, ErrExternalEvalException
+ }
switch expr := whereClause.(type) {
case *sqlparser.IsExpr:
- return evaluateIsExpr(expr, row, columnNames, alias)
+ return evaluateIsExpr(expr, string(out), alias)
case *sqlparser.RangeCond:
operator = expr.Operator
if operator != "between" && operator != "not between" {
return false, ErrUnsupportedSQLOperation
}
if operator == "not between" {
- myResult, err := evaluateBetween(expr, alias, row, columnNames)
+ result, err := evaluateBetween(expr, alias, string(out))
if err != nil {
return false, err
}
- return !myResult, nil
+ return !result, nil
}
- myResult, err := evaluateBetween(expr, alias, row, columnNames)
+ result, err := evaluateBetween(expr, alias, string(out))
if err != nil {
return false, err
}
- return myResult, nil
+ return result, nil
case *sqlparser.ComparisonExpr:
operator = expr.Operator
switch right := expr.Right.(type) {
case *sqlparser.FuncExpr:
- operand = evaluateFuncExpr(right, "", row, columnNames)
+ operand = evaluateFuncExpr(right, "", string(out))
case *sqlparser.SQLVal:
var err error
operand, err = evaluateParserType(right)
@@ -116,29 +80,22 @@ func matchesMyWhereClause(row []string, columnNames map[string]int, alias string
myVal = ""
switch left := expr.Left.(type) {
case *sqlparser.FuncExpr:
- myVal = evaluateFuncExpr(left, "", row, columnNames)
+ myVal = evaluateFuncExpr(left, "", string(out))
conversionColumn = ""
case *sqlparser.ColName:
- conversionColumn = cleanCol(left.Name.CompliantName(), alias)
- }
- if representsInt(conversionColumn) {
- intCol, err := strconv.Atoi(conversionColumn)
- if err != nil {
- return false, err
- }
- // Subtract 1 out because the index starts at 1 for Amazon instead of 0.
- return evaluateOperator(row[intCol-1], operator, operand)
+ conversionColumn = left.Name.CompliantName()
}
+
if myVal != "" {
return evaluateOperator(myVal, operator, operand)
}
- return evaluateOperator(row[columnNames[conversionColumn]], operator, operand)
+ return evaluateOperator(jsonValue(conversionColumn, string(out)), operator, operand)
case *sqlparser.AndExpr:
var leftVal bool
var rightVal bool
switch left := expr.Left.(type) {
case *sqlparser.ComparisonExpr:
- temp, err := matchesMyWhereClause(row, columnNames, alias, left)
+ temp, err := matchesMyWhereClause(record, alias, left)
if err != nil {
return false, err
}
@@ -146,7 +103,7 @@ func matchesMyWhereClause(row []string, columnNames map[string]int, alias string
}
switch right := expr.Right.(type) {
case *sqlparser.ComparisonExpr:
- temp, err := matchesMyWhereClause(row, columnNames, alias, right)
+ temp, err := matchesMyWhereClause(record, alias, right)
if err != nil {
return false, err
}
@@ -158,18 +115,18 @@ func matchesMyWhereClause(row []string, columnNames map[string]int, alias string
var rightVal bool
switch left := expr.Left.(type) {
case *sqlparser.ComparisonExpr:
- leftVal, _ = matchesMyWhereClause(row, columnNames, alias, left)
+ leftVal, _ = matchesMyWhereClause(record, alias, left)
}
switch right := expr.Right.(type) {
case *sqlparser.ComparisonExpr:
- rightVal, _ = matchesMyWhereClause(row, columnNames, alias, right)
+ rightVal, _ = matchesMyWhereClause(record, alias, right)
}
return (rightVal || leftVal), nil
-
}
return true, nil
}
+
func applyStrFunc(rawArg string, funcName string) string {
switch strings.ToUpper(funcName) {
case "TRIM":
@@ -192,6 +149,135 @@ func applyStrFunc(rawArg string, funcName string) string {
}
+// evaluateBetween is a function which evaluates a Between Clause.
+func evaluateBetween(betweenExpr *sqlparser.RangeCond, alias string, record string) (bool, error) {
+ var colToVal interface{}
+ var colFromVal interface{}
+ var conversionColumn string
+ var funcName string
+ switch colTo := betweenExpr.To.(type) {
+ case sqlparser.Expr:
+ switch colToMyVal := colTo.(type) {
+ case *sqlparser.FuncExpr:
+ colToVal = stringOps(colToMyVal, record, "")
+ case *sqlparser.SQLVal:
+ var err error
+ colToVal, err = evaluateParserType(colToMyVal)
+ if err != nil {
+ return false, err
+ }
+ }
+ }
+ switch colFrom := betweenExpr.From.(type) {
+ case sqlparser.Expr:
+ switch colFromMyVal := colFrom.(type) {
+ case *sqlparser.FuncExpr:
+ colFromVal = stringOps(colFromMyVal, record, "")
+ case *sqlparser.SQLVal:
+ var err error
+ colFromVal, err = evaluateParserType(colFromMyVal)
+ if err != nil {
+ return false, err
+ }
+ }
+ }
+ var myFuncVal string
+ switch left := betweenExpr.Left.(type) {
+ case *sqlparser.FuncExpr:
+ myFuncVal = evaluateFuncExpr(left, "", record)
+ conversionColumn = ""
+ case *sqlparser.ColName:
+ conversionColumn = cleanCol(left.Name.CompliantName(), alias)
+ }
+ toGreater, err := evaluateOperator(fmt.Sprintf("%v", colToVal), ">", colFromVal)
+ if err != nil {
+ return false, err
+ }
+ if toGreater {
+ return evalBetweenGreater(conversionColumn, record, funcName, colFromVal, colToVal, myFuncVal)
+ }
+ return evalBetweenLess(conversionColumn, record, funcName, colFromVal, colToVal, myFuncVal)
+}
+
+// evalBetweenGreater is a function which evaluates the between clause given
+// that TO is greater than FROM.
+func evalBetweenGreater(conversionColumn string, record string, funcName string, colFromVal interface{}, colToVal interface{}, myColVal string) (bool, error) {
+ if format.IsInt(conversionColumn) {
+ myVal, err := evaluateOperator(jsonValue("_"+conversionColumn, record), ">=", colFromVal)
+ if err != nil {
+ return false, err
+ }
+ var myOtherVal bool
+ myOtherVal, err = evaluateOperator(fmt.Sprintf("%v", colToVal), ">=", checkStringType(jsonValue("_"+conversionColumn, record)))
+ if err != nil {
+ return false, err
+ }
+ return (myVal && myOtherVal), nil
+ }
+ if myColVal != "" {
+ myVal, err := evaluateOperator(myColVal, ">=", colFromVal)
+ if err != nil {
+ return false, err
+ }
+ var myOtherVal bool
+ myOtherVal, err = evaluateOperator(fmt.Sprintf("%v", colToVal), ">=", checkStringType(myColVal))
+ if err != nil {
+ return false, err
+ }
+ return (myVal && myOtherVal), nil
+ }
+ myVal, err := evaluateOperator(jsonValue(conversionColumn, record), ">=", colFromVal)
+ if err != nil {
+ return false, err
+ }
+ var myOtherVal bool
+ myOtherVal, err = evaluateOperator(fmt.Sprintf("%v", colToVal), ">=", checkStringType(jsonValue(conversionColumn, record)))
+ if err != nil {
+ return false, err
+ }
+ return (myVal && myOtherVal), nil
+}
+
+// evalBetweenLess evaluates the BETWEEN clause when FROM is greater than TO.
+func evalBetweenLess(conversionColumn string, record string, funcName string, colFromVal interface{}, colToVal interface{}, myColVal string) (bool, error) {
+ if format.IsInt(conversionColumn) {
+ // Index-based columns are looked up by their underscore-prefixed key, e.g. "_1".
+ myVal, err := evaluateOperator(jsonValue("_"+conversionColumn, record), "<=", colFromVal)
+ if err != nil {
+ return false, err
+ }
+ var myOtherVal bool
+ myOtherVal, err = evaluateOperator(fmt.Sprintf("%v", colToVal), "<=", checkStringType(jsonValue("_"+conversionColumn, record)))
+ if err != nil {
+ return false, err
+ }
+ return (myVal && myOtherVal), nil
+ }
+ if myColVal != "" {
+ myVal, err := evaluateOperator(myColVal, "<=", colFromVal)
+ if err != nil {
+ return false, err
+ }
+ var myOtherVal bool
+ myOtherVal, err = evaluateOperator(fmt.Sprintf("%v", colToVal), "<=", checkStringType(myColVal))
+ if err != nil {
+ return false, err
+ }
+ return (myVal && myOtherVal), nil
+ }
+ myVal, err := evaluateOperator(jsonValue(conversionColumn, record), "<=", colFromVal)
+ if err != nil {
+ return false, err
+ }
+ var myOtherVal bool
+ myOtherVal, err = evaluateOperator(fmt.Sprintf("%v", colToVal), "<=", checkStringType(jsonValue(conversionColumn, record)))
+ if err != nil {
+ return false, err
+ }
+ return (myVal && myOtherVal), nil
+}
+
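As a rough illustration of what the helpers above compute for a plain numeric column, here is a self-contained sketch; the record literal and the betweenNumeric helper are illustrative only, not part of this PR.

package main

import (
	"fmt"
	"strconv"

	"github.com/tidwall/gjson"
)

// betweenNumeric mirrors the evalBetweenGreater shape for a numeric column:
// the record's value must be >= FROM and <= TO.
func betweenNumeric(record, col string, from, to float64) bool {
	v, err := strconv.ParseFloat(gjson.Get(record, col).String(), 64)
	if err != nil {
		return false
	}
	return v >= from && v <= to
}

func main() {
	rec := `{"age":"25"}`
	fmt.Println(betweenNumeric(rec, "age", 20, 30)) // true
}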
// This function is important: it evaluates the boolean statement and
// therefore actually returns a bool, functioning as the lowest level of the
// state machine.
@@ -432,151 +518,16 @@ func cleanCol(myCol string, alias string) string {
return myCol
}
-// evaluateBetween is a function which evaluates a Between Clause.
-func evaluateBetween(betweenExpr *sqlparser.RangeCond, alias string, record []string, columnNames map[string]int) (bool, error) {
- var colToVal interface{}
- var colFromVal interface{}
- var conversionColumn string
- var funcName string
- switch colTo := betweenExpr.To.(type) {
- case sqlparser.Expr:
- switch colToMyVal := colTo.(type) {
- case *sqlparser.FuncExpr:
- var temp string
- temp = stringOps(colToMyVal, record, "", columnNames)
- colToVal = []byte(temp)
- case *sqlparser.SQLVal:
- var err error
- colToVal, err = evaluateParserType(colToMyVal)
- if err != nil {
- return false, err
- }
- }
- }
- switch colFrom := betweenExpr.From.(type) {
- case sqlparser.Expr:
- switch colFromMyVal := colFrom.(type) {
- case *sqlparser.FuncExpr:
- colFromVal = stringOps(colFromMyVal, record, "", columnNames)
- case *sqlparser.SQLVal:
- var err error
- colFromVal, err = evaluateParserType(colFromMyVal)
- if err != nil {
- return false, err
- }
- }
- }
- var myFuncVal string
- myFuncVal = ""
- switch left := betweenExpr.Left.(type) {
- case *sqlparser.FuncExpr:
- myFuncVal = evaluateFuncExpr(left, "", record, columnNames)
- conversionColumn = ""
- case *sqlparser.ColName:
- conversionColumn = cleanCol(left.Name.CompliantName(), alias)
- }
-
- toGreater, err := evaluateOperator(fmt.Sprintf("%v", colToVal), ">", colFromVal)
- if err != nil {
- return false, err
- }
- if toGreater {
- return evalBetweenGreater(conversionColumn, record, funcName, columnNames, colFromVal, colToVal, myFuncVal)
- }
- return evalBetweenLess(conversionColumn, record, funcName, columnNames, colFromVal, colToVal, myFuncVal)
-}
-
-// evalBetweenLess is a function which evaluates the between given that the
-// FROM is > than the TO.
-func evalBetweenLess(conversionColumn string, record []string, funcName string, columnNames map[string]int, colFromVal interface{}, colToVal interface{}, myCoalVal string) (bool, error) {
- if representsInt(conversionColumn) {
- myIndex, _ := strconv.Atoi(conversionColumn)
- // Subtract 1 out because the index starts at 1 for Amazon instead of 0.
- myVal, err := evaluateOperator(record[myIndex-1], "<=", colFromVal)
- if err != nil {
- return false, err
- }
- var myOtherVal bool
- myOtherVal, err = evaluateOperator(fmt.Sprintf("%v", colToVal), "<=", checkStringType(record[myIndex-1]))
- if err != nil {
- return false, err
- }
- return (myVal && myOtherVal), nil
- }
- if myCoalVal != "" {
- myVal, err := evaluateOperator(myCoalVal, "<=", colFromVal)
- if err != nil {
- return false, err
- }
- var myOtherVal bool
- myOtherVal, err = evaluateOperator(fmt.Sprintf("%v", colToVal), "<=", checkStringType(myCoalVal))
- if err != nil {
- return false, err
- }
- return (myVal && myOtherVal), nil
- }
- myVal, err := evaluateOperator(record[columnNames[conversionColumn]], "<=", colFromVal)
- if err != nil {
- return false, err
- }
- var myOtherVal bool
- myOtherVal, err = evaluateOperator(fmt.Sprintf("%v", colToVal), "<=", checkStringType(record[columnNames[conversionColumn]]))
- if err != nil {
- return false, err
- }
- return (myVal && myOtherVal), nil
-}
-
-// evalBetweenGreater is a function which evaluates the between given that the
-// TO is > than the FROM.
-func evalBetweenGreater(conversionColumn string, record []string, funcName string, columnNames map[string]int, colFromVal interface{}, colToVal interface{}, myCoalVal string) (bool, error) {
- if representsInt(conversionColumn) {
- myIndex, _ := strconv.Atoi(conversionColumn)
- myVal, err := evaluateOperator(record[myIndex-1], ">=", colFromVal)
- if err != nil {
- return false, err
- }
- var myOtherVal bool
- myOtherVal, err = evaluateOperator(fmt.Sprintf("%v", colToVal), ">=", checkStringType(record[myIndex-1]))
- if err != nil {
- return false, err
- }
- return (myVal && myOtherVal), nil
- }
- if myCoalVal != "" {
- myVal, err := evaluateOperator(myCoalVal, ">=", colFromVal)
- if err != nil {
- return false, err
- }
- var myOtherVal bool
- myOtherVal, err = evaluateOperator(fmt.Sprintf("%v", colToVal), ">=", checkStringType(myCoalVal))
- if err != nil {
- return false, err
- }
- return (myVal && myOtherVal), nil
- }
- myVal, err := evaluateOperator(record[columnNames[conversionColumn]], ">=", colFromVal)
- if err != nil {
- return false, err
- }
- var myOtherVal bool
- myOtherVal, err = evaluateOperator(fmt.Sprintf("%v", colToVal), ">=", checkStringType(record[columnNames[conversionColumn]]))
- if err != nil {
- return false, err
- }
- return (myVal && myOtherVal), nil
-}
-
// whereClauseNameErrs is a function which returns an error if there is a column
// in the where clause which does not exist.
-func (reader *Input) whereClauseNameErrs(whereClause interface{}, alias string) error {
+func whereClauseNameErrs(whereClause interface{}, alias string, f format.Select) error {
var conversionColumn string
switch expr := whereClause.(type) {
// case for checking errors within a clause of the form "col_name is ..."
case *sqlparser.IsExpr:
switch myCol := expr.Expr.(type) {
case *sqlparser.FuncExpr:
- if err := reader.evaluateFuncErr(myCol); err != nil {
+ if err := evaluateFuncErr(myCol, f); err != nil {
return err
}
case *sqlparser.ColName:
@@ -585,7 +536,7 @@ func (reader *Input) whereClauseNameErrs(whereClause interface{}, alias string)
case *sqlparser.RangeCond:
switch left := expr.Left.(type) {
case *sqlparser.FuncExpr:
- if err := reader.evaluateFuncErr(left); err != nil {
+ if err := evaluateFuncErr(left, f); err != nil {
return err
}
case *sqlparser.ColName:
@@ -594,7 +545,7 @@ func (reader *Input) whereClauseNameErrs(whereClause interface{}, alias string)
case *sqlparser.ComparisonExpr:
switch left := expr.Left.(type) {
case *sqlparser.FuncExpr:
- if err := reader.evaluateFuncErr(left); err != nil {
+ if err := evaluateFuncErr(left, f); err != nil {
return err
}
case *sqlparser.ColName:
@@ -603,54 +554,30 @@ func (reader *Input) whereClauseNameErrs(whereClause interface{}, alias string)
case *sqlparser.AndExpr:
switch left := expr.Left.(type) {
case *sqlparser.ComparisonExpr:
- return reader.whereClauseNameErrs(left, alias)
+ return whereClauseNameErrs(left, alias, f)
}
switch right := expr.Right.(type) {
case *sqlparser.ComparisonExpr:
- return reader.whereClauseNameErrs(right, alias)
+ return whereClauseNameErrs(right, alias, f)
}
case *sqlparser.OrExpr:
switch left := expr.Left.(type) {
case *sqlparser.ComparisonExpr:
- return reader.whereClauseNameErrs(left, alias)
+ return whereClauseNameErrs(left, alias, f)
}
switch right := expr.Right.(type) {
case *sqlparser.ComparisonExpr:
- return reader.whereClauseNameErrs(right, alias)
+ return whereClauseNameErrs(right, alias, f)
}
}
if conversionColumn != "" {
- return reader.colNameErrs([]string{conversionColumn})
- }
- return nil
-}
-
-// colNameErrs is a function which makes sure that the headers are requested are
-// present in the file otherwise it throws an error.
-func (reader *Input) colNameErrs(columnNames []string) error {
- for i := 0; i < len(columnNames); i++ {
- if columnNames[i] == "" {
- continue
- }
- if !representsInt(columnNames[i]) && !reader.options.HeaderOpt {
- return ErrInvalidColumnIndex
- }
- if representsInt(columnNames[i]) {
- tempInt, _ := strconv.Atoi(columnNames[i])
- if tempInt > len(reader.Header()) || tempInt == 0 {
- return ErrInvalidColumnIndex
- }
- } else {
- if reader.options.HeaderOpt && !stringInSlice(columnNames[i], reader.Header()) {
- return ErrMissingHeaders
- }
- }
+ return f.ColNameErrs([]string{conversionColumn})
}
return nil
}
// aggFuncToStr converts an array of floats into a properly formatted string.
-func (reader *Input) aggFuncToStr(aggVals []float64) string {
+func aggFuncToStr(aggVals []float64, f format.Select) string {
// Define a number formatting function
numToStr := func(f float64) string {
if f == math.Trunc(f) {
@@ -666,7 +593,7 @@ func (reader *Input) aggFuncToStr(aggVals []float64) string {
}
// Intersperse field delimiter
- return strings.Join(vals, reader.options.OutputFieldDelimiter)
+ return strings.Join(vals, f.OutputFieldDelimiter())
}
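Only the head of numToStr appears in this hunk. A plausible completion is sketched below, assuming the conventional FormatInt/FormatFloat pair; the six-decimal fixed format matches the expectations in the deleted TestToStringAgg table further down, but the exact body is an assumption.

package main

import (
	"fmt"
	"math"
	"strconv"
)

// numToStr: integral floats print without a decimal part, all others as
// fixed-point with six decimals.
func numToStr(f float64) string {
	if f == math.Trunc(f) {
		return strconv.FormatInt(int64(f), 10)
	}
	return strconv.FormatFloat(f, 'f', 6, 64)
}

func main() {
	fmt.Println(numToStr(10))   // 10
	fmt.Println(numToStr(11.3)) // 11.300000
}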
// checkForDuplicates ensures we do not have an ambiguous column name.
@@ -714,18 +641,18 @@ func evaluateParserType(col *sqlparser.SQLVal) (interface{}, error) {
// parseErrs is the function which handles all the errors that could occur
// through use of function arguments such as column names in NULLIF
-func (reader *Input) parseErrs(columnNames []string, whereClause interface{}, alias string, myFuncs *SelectFuncs) error {
+func parseErrs(columnNames []string, whereClause interface{}, alias string, myFuncs *SelectFuncs, f format.Select) error {
// Below code cleans up column names.
- reader.processColumnNames(columnNames, alias)
+ processColumnNames(columnNames, alias, f)
if columnNames[0] != "*" {
- if err := reader.colNameErrs(columnNames); err != nil {
+ if err := f.ColNameErrs(columnNames); err != nil {
return err
}
}
// Below code ensures the whereClause has no errors.
if whereClause != nil {
tempClause := whereClause
- if err := reader.whereClauseNameErrs(tempClause, alias); err != nil {
+ if err := whereClauseNameErrs(tempClause, alias, f); err != nil {
return err
}
}
@@ -733,9 +660,16 @@ func (reader *Input) parseErrs(columnNames []string, whereClause interface{}, al
if myFuncs.funcExpr[i] == nil {
continue
}
- if err := reader.evaluateFuncErr(myFuncs.funcExpr[i]); err != nil {
+ if err := evaluateFuncErr(myFuncs.funcExpr[i], f); err != nil {
return err
}
}
return nil
}
+
+// jsonValue returns the value corresponding to the key in the JSON row.
+// input is the key and row is the JSON record as a string.
+func jsonValue(input string, row string) string {
+ value := gjson.Get(row, input)
+ return value.String()
+}
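jsonValue is a thin wrapper over gjson.Get; a minimal usage sketch follows. The record literal is illustrative, but the "_<index>" key convention matches how this PR stores positional (headerless CSV) columns.

package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	row := `{"name":"drive","_0":"first"}`
	fmt.Println(gjson.Get(row, "name").String()) // drive
	fmt.Println(gjson.Get(row, "_0").String())   // first (positional column key)
}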
diff --git a/pkg/s3select/input.go b/pkg/s3select/input.go
index b814c2294..3c8a2e430 100644
--- a/pkg/s3select/input.go
+++ b/pkg/s3select/input.go
@@ -18,18 +18,14 @@ package s3select
import (
"bytes"
- "compress/bzip2"
- "encoding/csv"
- "encoding/xml"
"io"
- "strconv"
+ "net/http"
"strings"
"time"
- "net/http"
-
- gzip "github.com/klauspost/pgzip"
- "github.com/minio/minio/pkg/ioutil"
+ "github.com/minio/minio/pkg/s3select/format"
+ "github.com/minio/minio/pkg/s3select/format/csv"
+ "github.com/minio/minio/pkg/s3select/format/json"
)
const (
@@ -40,245 +36,16 @@ const (
continuationTime time.Duration = 5 * time.Second
)
-// progress represents a struct that represents the format for XML of the
-// progress messages
-type progress struct {
- XMLName xml.Name `xml:"Progress" json:"-"`
- BytesScanned int64 `xml:"BytesScanned"`
- BytesProcessed int64 `xml:"BytesProcessed"`
- BytesReturned int64 `xml:"BytesReturned"`
-}
-
-// stats represents a struct that represents the format for XML of the stat
-// messages
-type stats struct {
- XMLName xml.Name `xml:"Stats" json:"-"`
- BytesScanned int64 `xml:"BytesScanned"`
- BytesProcessed int64 `xml:"BytesProcessed"`
- BytesReturned int64 `xml:"BytesReturned"`
-}
-
-// StatInfo is a struct that represents the
-type statInfo struct {
- BytesScanned int64
- BytesReturned int64
- BytesProcessed int64
-}
-
-// Input represents a record producing input from a formatted file or pipe.
-type Input struct {
- options *Options
- reader *csv.Reader
- firstRow []string
- header []string
- minOutputLength int
- stats *statInfo
-}
-
-// Options options are passed to the underlying encoding/csv reader.
-type Options struct {
- // HasHeader when true, will treat the first row as a header row.
- HasHeader bool
-
- // RecordDelimiter is the string that records are delimited by.
- RecordDelimiter string
-
- // FieldDelimiter is the string that fields are delimited by.
- FieldDelimiter string
-
- // Comments is the string the first character of a line of
- // text matches the comment character.
- Comments string
-
- // Name of the table that is used for querying
- Name string
-
- // ReadFrom is where the data will be read from.
- ReadFrom io.Reader
-
- // If true then we need to add gzip or bzip reader.
- // to extract the csv.
- Compressed string
-
- // SQL expression meant to be evaluated.
- Expression string
-
- // What the outputted CSV will be delimited by .
- OutputFieldDelimiter string
-
- // Size of incoming object
- StreamSize int64
-
- // Whether Header is "USE" or another
- HeaderOpt bool
-
- // Progress enabled, enable/disable progress messages.
- Progress bool
-}
-
-// NewInput sets up a new Input, the first row is read when this is run.
-// If there is a problem with reading the first row, the error is returned.
-// Otherwise, the returned reader can be reliably consumed with ReadRecord()
-// until ReadRecord() returns nil.
-func NewInput(opts *Options) (*Input, error) {
- myReader := opts.ReadFrom
- var tempBytesScanned int64
- tempBytesScanned = 0
- switch opts.Compressed {
- case "GZIP":
- tempBytesScanned = opts.StreamSize
- var err error
- if myReader, err = gzip.NewReader(opts.ReadFrom); err != nil {
- return nil, ErrTruncatedInput
- }
- case "BZIP2":
- tempBytesScanned = opts.StreamSize
- myReader = bzip2.NewReader(opts.ReadFrom)
- }
-
- // DelimitedReader treats custom record delimiter like `\r\n`,`\r`,`ab` etc and replaces it with `\n`.
- normalizedReader := ioutil.NewDelimitedReader(myReader, []rune(opts.RecordDelimiter))
- progress := &statInfo{
- BytesScanned: tempBytesScanned,
- BytesProcessed: 0,
- BytesReturned: 0,
- }
-
- reader := &Input{
- options: opts,
- reader: csv.NewReader(normalizedReader),
- stats: progress,
- }
- reader.firstRow = nil
-
- reader.reader.FieldsPerRecord = -1
- if reader.options.FieldDelimiter != "" {
- reader.reader.Comma = rune(reader.options.FieldDelimiter[0])
- }
-
- if reader.options.Comments != "" {
- reader.reader.Comment = rune(reader.options.Comments[0])
- }
-
- // QuoteCharacter - " (defaulted currently)
- reader.reader.LazyQuotes = true
-
- if err := reader.readHeader(); err != nil {
- return nil, err
- }
-
- return reader, nil
-}
-
-// ReadRecord reads a single record from the . Always returns successfully.
-// If the record is empty, an empty []string is returned.
-// Record expand to match the current row size, adding blank fields as needed.
-// Records never return less then the number of fields in the first row.
-// Returns nil on EOF
-// In the event of a parse error due to an invalid record, it is logged, and
-// an empty []string is returned with the number of fields in the first row,
-// as if the record were empty.
-//
-// In general, this is a very tolerant of problems reader.
-func (reader *Input) ReadRecord() []string {
- var row []string
- var fileErr error
-
- if reader.firstRow != nil {
- row = reader.firstRow
- reader.firstRow = nil
- return row
- }
-
- row, fileErr = reader.reader.Read()
- emptysToAppend := reader.minOutputLength - len(row)
- if fileErr == io.EOF || fileErr == io.ErrClosedPipe {
- return nil
- } else if _, ok := fileErr.(*csv.ParseError); ok {
- emptysToAppend = reader.minOutputLength
- }
-
- if emptysToAppend > 0 {
- for counter := 0; counter < emptysToAppend; counter++ {
- row = append(row, "")
- }
- }
-
- return row
-}
-
-// readHeader reads the header into the header variable if the header is present
-// as the first row of the csv
-func (reader *Input) readHeader() error {
- var readErr error
- if reader.options.HasHeader {
- reader.firstRow, readErr = reader.reader.Read()
- if readErr != nil {
- return ErrCSVParsingError
- }
- reader.header = cleanHeader(reader.firstRow)
- reader.firstRow = nil
- reader.minOutputLength = len(reader.header)
- } else {
- reader.firstRow, readErr = reader.reader.Read()
- reader.header = make([]string, len(reader.firstRow))
- for i := 0; i < reader.minOutputLength; i++ {
- reader.header[i] = strconv.Itoa(i)
- }
-
- }
- return nil
-}
-
-// Replace the spaces in columnnames with underscores
-func cleanHeader(columns []string) []string {
- for i := 0; i < len(columns); i++ {
- columns[i] = strings.Replace(columns[i], " ", "_", -1)
- }
- return columns
-}
-
-// createStatXML is the function which does the marshaling from the stat
-// structs into XML so that the progress and stat message can be sent
-func (reader *Input) createStatXML() (string, error) {
- if reader.options.Compressed == "NONE" {
- reader.stats.BytesProcessed = reader.options.StreamSize
- reader.stats.BytesScanned = reader.stats.BytesProcessed
- }
- out, err := xml.Marshal(&stats{
- BytesScanned: reader.stats.BytesScanned,
- BytesProcessed: reader.stats.BytesProcessed,
- BytesReturned: reader.stats.BytesReturned,
- })
- if err != nil {
- return "", err
- }
- return xml.Header + string(out), nil
-}
-
-// createProgressXML is the function which does the marshaling from the progress structs into XML so that the progress and stat message can be sent
-func (reader *Input) createProgressXML() (string, error) {
- if reader.options.HasHeader {
- reader.stats.BytesProcessed += processSize(reader.header)
- }
- if reader.options.Compressed == "NONE" {
- reader.stats.BytesScanned = reader.stats.BytesProcessed
- }
- out, err := xml.Marshal(&progress{
- BytesScanned: reader.stats.BytesScanned,
- BytesProcessed: reader.stats.BytesProcessed,
- BytesReturned: reader.stats.BytesReturned,
- })
- if err != nil {
- return "", err
- }
- return xml.Header + string(out), nil
-}
-
-// Header returns the header of the reader. Either the first row if a header
-// set in the options, or c#, where # is the column number, starting with 0.
-func (reader *Input) Header() []string {
- return reader.header
+// ParseSelectTokens carries the tokenized parts of a select query: the requested
+// columns, alias, limit value, where clause, aggregate function names, functions, and any error.
+type ParseSelectTokens struct {
+ reqCols []string
+ alias string
+ myLimit int64
+ whereClause interface{}
+ aggFunctionNames []string
+ myFuncs *SelectFuncs
+ myErr error
}
// Row is a Struct for keeping track of key aspects of a row.
@@ -287,10 +54,58 @@ type Row struct {
err error
}
+// cleanExpr replaces double and single quotes with backticks so the select parser accepts them.
+func cleanExpr(expr string) string {
+ r := strings.NewReplacer("\"", "`", "'", "`")
+ return r.Replace(expr)
+}
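The substitution cleanExpr performs can be exercised directly with strings.NewReplacer; a minimal sketch:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Same replacer as cleanExpr: double and single quotes become backticks.
	r := strings.NewReplacer("\"", "`", "'", "`")
	fmt.Println(r.Replace("SELECT \"col\" FROM S3Object WHERE name = 'x'"))
	// prints: SELECT `col` FROM S3Object WHERE name = `x`
}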
+
+// New - initializes a new select format
+func New(gr io.Reader, size int64, req ObjectSelectRequest) (s3s format.Select, err error) {
+ // Initializing options for CSV
+ if req.InputSerialization.CSV != nil {
+ if req.OutputSerialization.CSV.FieldDelimiter == "" {
+ req.OutputSerialization.CSV.FieldDelimiter = ","
+ }
+ if req.InputSerialization.CSV.FileHeaderInfo == "" {
+ req.InputSerialization.CSV.FileHeaderInfo = CSVFileHeaderInfoNone
+ }
+ if req.InputSerialization.CSV.RecordDelimiter == "" {
+ req.InputSerialization.CSV.RecordDelimiter = "\n"
+ }
+ s3s, err = csv.New(&csv.Options{
+ HasHeader: req.InputSerialization.CSV.FileHeaderInfo != CSVFileHeaderInfoNone,
+ RecordDelimiter: req.InputSerialization.CSV.RecordDelimiter,
+ FieldDelimiter: req.InputSerialization.CSV.FieldDelimiter,
+ Comments: req.InputSerialization.CSV.Comments,
+ Name: "S3Object", // Default table name for all objects
+ ReadFrom: gr,
+ Compressed: string(req.InputSerialization.CompressionType),
+ Expression: cleanExpr(req.Expression),
+ OutputFieldDelimiter: req.OutputSerialization.CSV.FieldDelimiter,
+ StreamSize: size,
+ HeaderOpt: req.InputSerialization.CSV.FileHeaderInfo == CSVFileHeaderInfoUse,
+ Progress: req.RequestProgress.Enabled,
+ })
+ } else if req.InputSerialization.JSON != nil {
+ // Initializing options for JSON
+ s3s, err = json.New(&json.Options{
+ Name: "S3Object", // Default table name for all objects
+ ReadFrom: gr,
+ Compressed: string(req.InputSerialization.CompressionType),
+ Expression: cleanExpr(req.Expression),
+ StreamSize: size,
+ Type: req.InputSerialization.JSON.Type == JSONTypeDocument,
+ Progress: req.RequestProgress.Enabled,
+ })
+ }
+ return s3s, err
+}
+
// Execute is the function where all the blocking occurs. It writes to the HTTP
// response writer in a streaming fashion so that the client can actively use
// the results before the query finishes executing.
-func (reader *Input) Execute(writer io.Writer) error {
+func Execute(writer io.Writer, f format.Select) error {
myRow := make(chan *Row)
curBuf := bytes.NewBuffer(make([]byte, 1000000))
curBuf.Reset()
@@ -298,12 +113,14 @@ func (reader *Input) Execute(writer io.Writer) error {
continuationTimer := time.NewTimer(continuationTime)
defer progressTicker.Stop()
defer continuationTimer.Stop()
- go reader.runSelectParser(reader.options.Expression, myRow)
+
+ go runSelectParser(f, myRow)
+
for {
select {
case row, ok := <-myRow:
if ok && row.err != nil {
- errorMessage := reader.writeErrorMessage(row.err, curBuf)
+ errorMessage := writeErrorMessage(row.err, curBuf)
_, err := errorMessage.WriteTo(writer)
flusher, okFlush := writer.(http.Flusher)
if okFlush {
@@ -316,7 +133,7 @@ func (reader *Input) Execute(writer io.Writer) error {
close(myRow)
return nil
} else if ok {
- message := reader.writeRecordMessage(row.record, curBuf)
+ message := writeRecordMessage(row.record, curBuf)
_, err := message.WriteTo(writer)
flusher, okFlush := writer.(http.Flusher)
if okFlush {
@@ -326,17 +143,17 @@ func (reader *Input) Execute(writer io.Writer) error {
return err
}
curBuf.Reset()
- reader.stats.BytesReturned += int64(len(row.record))
+ f.UpdateBytesReturned(int64(len(row.record)))
if !continuationTimer.Stop() {
<-continuationTimer.C
}
continuationTimer.Reset(continuationTime)
} else if !ok {
- statPayload, err := reader.createStatXML()
+ statPayload, err := f.CreateStatXML()
if err != nil {
return err
}
- statMessage := reader.writeStatMessage(statPayload, curBuf)
+ statMessage := writeStatMessage(statPayload, curBuf)
_, err = statMessage.WriteTo(writer)
flusher, ok := writer.(http.Flusher)
if ok {
@@ -346,7 +163,7 @@ func (reader *Input) Execute(writer io.Writer) error {
return err
}
curBuf.Reset()
- message := reader.writeEndMessage(curBuf)
+ message := writeEndMessage(curBuf)
_, err = message.WriteTo(writer)
flusher, ok = writer.(http.Flusher)
if ok {
@@ -360,12 +177,12 @@ func (reader *Input) Execute(writer io.Writer) error {
case <-progressTicker.C:
// Send progress messages only if requested by client.
- if reader.options.Progress {
- progressPayload, err := reader.createProgressXML()
+ if f.Progress() {
+ progressPayload, err := f.CreateProgressXML()
if err != nil {
return err
}
- progressMessage := reader.writeProgressMessage(progressPayload, curBuf)
+ progressMessage := writeProgressMessage(progressPayload, curBuf)
_, err = progressMessage.WriteTo(writer)
flusher, ok := writer.(http.Flusher)
if ok {
@@ -377,7 +194,7 @@ func (reader *Input) Execute(writer io.Writer) error {
curBuf.Reset()
}
case <-continuationTimer.C:
- message := reader.writeContinuationMessage(curBuf)
+ message := writeContinuationMessage(curBuf)
_, err := message.WriteTo(writer)
flusher, ok := writer.(http.Flusher)
if ok {
diff --git a/pkg/s3select/output.go b/pkg/s3select/output.go
index b40048e69..1ed433de0 100644
--- a/pkg/s3select/output.go
+++ b/pkg/s3select/output.go
@@ -282,7 +282,7 @@ func writeProgressHeader() []byte {
// writeRecordMessage is the function which constructs the binary message for a
// record message to be sent.
-func (csvOutput *Input) writeRecordMessage(payload string, currentMessage *bytes.Buffer) *bytes.Buffer {
+func writeRecordMessage(payload string, currentMessage *bytes.Buffer) *bytes.Buffer {
// The below are the specifications of the header for a "record" event
// 11 -event type - 7 - 7 "Records"
// 13 -content-type -7 -24 "application/octet-stream"
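The numeric runs in these comments (11, 7, 7; 13, 7, 24) are the wire layout of one event-stream string header: name length, name bytes, value type 7 (string), value length, value. A minimal sketch of that framing follows; writeStringHeader is an illustrative helper, not the PR's encoder, and prelude/CRC framing is omitted.

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

// writeStringHeader lays out one event-stream header of type "string".
func writeStringHeader(buf *bytes.Buffer, name, value string) {
	buf.WriteByte(byte(len(name))) // e.g. 11 for ":event-type", 13 for ":content-type"
	buf.WriteString(name)
	buf.WriteByte(7) // value type 7 = string
	binary.Write(buf, binary.BigEndian, uint16(len(value))) // e.g. 7 for "Records"
	buf.WriteString(value)
}

func main() {
	var buf bytes.Buffer
	writeStringHeader(&buf, ":event-type", "Records")
	fmt.Printf("% x\n", buf.Bytes())
}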
@@ -310,7 +310,7 @@ func (csvOutput *Input) writeRecordMessage(payload string, currentMessage *bytes
// writeContinuationMessage is the function which constructs the binary message
// for a continuation message to be sent.
-func (csvOutput *Input) writeContinuationMessage(currentMessage *bytes.Buffer) *bytes.Buffer {
+func writeContinuationMessage(currentMessage *bytes.Buffer) *bytes.Buffer {
// 11 -event type - 7 - 4 "Cont"
// 13 -message-type -7 5 "event"
// This is predefined from AMZ protocol found here:
@@ -333,7 +333,7 @@ func (csvOutput *Input) writeContinuationMessage(currentMessage *bytes.Buffer) *
// writeEndMessage is the function which constructs the binary message
// for an end message to be sent.
-func (csvOutput *Input) writeEndMessage(currentMessage *bytes.Buffer) *bytes.Buffer {
+func writeEndMessage(currentMessage *bytes.Buffer) *bytes.Buffer {
// 11 -event type - 7 - 3 "End"
// 13 -message-type -7 5 "event"
// This is predefined from AMZ protocol found here:
@@ -356,7 +356,7 @@ func (csvOutput *Input) writeEndMessage(currentMessage *bytes.Buffer) *bytes.Buf
// writeStatMessage is the function which constructs the binary message for a
// stat message to be sent.
-func (csvOutput *Input) writeStatMessage(payload string, currentMessage *bytes.Buffer) *bytes.Buffer {
+func writeStatMessage(payload string, currentMessage *bytes.Buffer) *bytes.Buffer {
// 11 -event type - 7 - 5 "Stat" 20
// 13 -content-type -7 -8 "text/xml" 25
// 13 -message-type -7 5 "event" 22
@@ -384,7 +384,7 @@ func (csvOutput *Input) writeStatMessage(payload string, currentMessage *bytes.B
// writeProgressMessage is the function which constructs the binary message for
// a progress message to be sent.
-func (csvOutput *Input) writeProgressMessage(payload string, currentMessage *bytes.Buffer) *bytes.Buffer {
+func writeProgressMessage(payload string, currentMessage *bytes.Buffer) *bytes.Buffer {
// The below are the specifications of the header for a "Progress" event
// 11 -event type - 7 - 8 "Progress" 23
// 13 -content-type -7 -8 "text/xml" 25
@@ -413,7 +413,7 @@ func (csvOutput *Input) writeProgressMessage(payload string, currentMessage *byt
// writeErrorMessage is the function which constructs the binary message for an
// error message to be sent.
-func (csvOutput *Input) writeErrorMessage(errorMessage error, currentMessage *bytes.Buffer) *bytes.Buffer {
+func writeErrorMessage(errorMessage error, currentMessage *bytes.Buffer) *bytes.Buffer {
// The below are the specifications of the header for a "error" event
// 11 -error-code - 7 - DEFINED "DEFINED"
diff --git a/pkg/s3select/select.go b/pkg/s3select/select.go
index 38b4e7fc5..b4c7398b2 100644
--- a/pkg/s3select/select.go
+++ b/pkg/s3select/select.go
@@ -17,10 +17,13 @@
package s3select
import (
+ "encoding/json"
"math"
+ "sort"
"strconv"
"strings"
+ "github.com/minio/minio/pkg/s3select/format"
"github.com/xwb1989/sqlparser"
)
@@ -33,8 +36,8 @@ type SelectFuncs struct {
// RunSqlParser allows us to easily bundle all the functions from above and run
// them in the appropriate order.
-func (reader *Input) runSelectParser(selectExpression string, myRow chan *Row) {
- reqCols, alias, myLimit, whereClause, aggFunctionNames, myFuncs, myErr := reader.ParseSelect(selectExpression)
+func runSelectParser(f format.Select, myRow chan *Row) {
+ reqCols, alias, myLimit, whereClause, aggFunctionNames, myFuncs, myErr := ParseSelect(f)
if myErr != nil {
rowStruct := &Row{
err: myErr,
@@ -42,23 +45,26 @@ func (reader *Input) runSelectParser(selectExpression string, myRow chan *Row) {
myRow <- rowStruct
return
}
- reader.processSelectReq(reqCols, alias, whereClause, myLimit, aggFunctionNames, myRow, myFuncs)
+ processSelectReq(reqCols, alias, whereClause, myLimit, aggFunctionNames, myRow, myFuncs, f)
+
}
// ParseSelect parses the SELECT expression, and effectively tokenizes it into
// its separate parts. It returns the requested column names,alias,limit of
// records, and the where clause.
-func (reader *Input) ParseSelect(sqlInput string) ([]string, string, int64, interface{}, []string, *SelectFuncs, error) {
+func ParseSelect(f format.Select) ([]string, string, int64, interface{}, []string, *SelectFuncs, error) {
// return columnNames, alias, limitOfRecords, whereclause,coalStore, nil
- stmt, err := sqlparser.Parse(sqlInput)
- var whereClause interface{}
- var alias string
- var limit int64
- myFuncs := &SelectFuncs{}
+
+ stmt, err := sqlparser.Parse(cleanExpr(f.Expression()))
// TODO: parse these errors further so more specific S3 errors can be returned
if err != nil {
return nil, "", 0, nil, nil, nil, ErrLexerInvalidChar
}
+
+ var whereClause interface{}
+ var alias string
+ var limit int64
+ myFuncs := &SelectFuncs{}
switch stmt := stmt.(type) {
case *sqlparser.Select:
// evaluates the where clause
@@ -146,7 +152,7 @@ func (reader *Input) ParseSelect(sqlInput string) ([]string, string, int64, inte
if stmt.OrderBy != nil {
return nil, "", 0, nil, nil, nil, ErrParseUnsupportedToken
}
- if err := reader.parseErrs(columnNames, whereClause, alias, myFuncs); err != nil {
+ if err := parseErrs(columnNames, whereClause, alias, myFuncs, f); err != nil {
return nil, "", 0, nil, nil, nil, err
}
return columnNames, alias, limit, whereClause, functionNames, myFuncs, nil
@@ -157,13 +163,13 @@ func (reader *Input) ParseSelect(sqlInput string) ([]string, string, int64, inte
// This is the main function. It goes row by row, and for records which satisfy
// the where clause it prints the appropriate row given the requested
// columns.
-func (reader *Input) processSelectReq(reqColNames []string, alias string, whereClause interface{}, limitOfRecords int64, functionNames []string, myRow chan *Row, myFunc *SelectFuncs) {
+func processSelectReq(reqColNames []string, alias string, whereClause interface{}, limitOfRecords int64, functionNames []string, myRow chan *Row, myFunc *SelectFuncs, f format.Select) {
counter := -1
+ var columns []string
filtrCount := 0
functionFlag := false
// My values is used to store our aggregation values if we need to store them.
myAggVals := make([]float64, len(reqColNames))
- var columns []string
// lowercaseColumnsMap is used in accordance with hasDuplicates so that we can
// raise the error "Ambiguous" if a case-insensitive column is provided and we
// have multiple matches.
@@ -174,23 +180,35 @@ func (reader *Input) processSelectReq(reqColNames []string, alias string, whereC
if limitOfRecords == 0 {
limitOfRecords = math.MaxInt64
}
-
for {
- record := reader.ReadRecord()
- reader.stats.BytesProcessed += processSize(record)
+ record, err := f.Read()
+ if err != nil {
+ rowStruct := &Row{
+ err: err,
+ }
+ myRow <- rowStruct
+ return
+ }
if record == nil {
if functionFlag {
rowStruct := &Row{
- record: reader.aggFuncToStr(myAggVals) + "\n",
+ record: aggFuncToStr(myAggVals, f) + "\n",
}
myRow <- rowStruct
}
close(myRow)
return
}
- if counter == -1 && reader.options.HeaderOpt && len(reader.header) > 0 {
- columns = reader.Header()
+
+ // Marshal the record once so downstream lookups can use gjson paths.
+ out, _ := json.Marshal(record)
+ f.UpdateBytesProcessed(record)
+
+ if counter == -1 && f.HasHeader() && len(f.Header()) > 0 {
+ columns = f.Header()
myErr := checkForDuplicates(columns, columnsMap, hasDuplicates, lowercaseColumnsMap)
+ if format.IsInt(reqColNames[0]) {
+ myErr = ErrMissingHeaders
+ }
if myErr != nil {
rowStruct := &Row{
err: myErr,
@@ -198,17 +216,21 @@ func (reader *Input) processSelectReq(reqColNames []string, alias string, whereC
myRow <- rowStruct
return
}
- } else if counter == -1 && len(reader.header) > 0 {
- columns = reader.Header()
+ } else if counter == -1 && len(f.Header()) > 0 {
+ columns = f.Header()
+ for i := 0; i < len(columns); i++ {
+ // Positional keys ("_0", "_1", ...) stand in for header names.
+ columnsMap["_"+strconv.Itoa(i)] = i
+ }
+
}
- // When we have reached our limit, on what the user specified as the number
- // of rows they wanted, we terminate our interpreter.
+ // Return once the number of records reaches the LIMIT defined in the select query.
if int64(filtrCount) == limitOfRecords && limitOfRecords != 0 {
close(myRow)
return
}
+
// The call to the where clause function ensures that the rows we print match our where clause.
- condition, myErr := matchesMyWhereClause(record, columnsMap, alias, whereClause)
+ condition, myErr := matchesMyWhereClause(record, alias, whereClause)
if myErr != nil {
rowStruct := &Row{
err: myErr,
@@ -219,25 +241,33 @@ func (reader *Input) processSelectReq(reqColNames []string, alias string, whereC
if condition {
// if it's an asterisk we just print everything in the row
if reqColNames[0] == "*" && functionNames[0] == "" {
- rowStruct := &Row{
- record: reader.printAsterix(record) + "\n",
+ var row *Row
+ switch f.Type() {
+ case format.CSV:
+ row = &Row{
+ record: strings.Join(convertToSlice(columnsMap, record, string(out)), f.OutputFieldDelimiter()) + "\n",
+ }
+ case format.JSON:
+ row = &Row{
+ record: string(out) + "\n",
+ }
}
- myRow <- rowStruct
+ myRow <- row
+
} else if alias != "" {
// This deals with the case of a request for a column with an
// index, e.g. A_1.
- if representsInt(reqColNames[0]) {
+ if format.IsInt(reqColNames[0]) {
// This checks whether any aggregation function was called as now we
- // no longer will go through printing each row, and only print at the
- // end
+ // will no longer print each row, and will only print at the end.
if len(functionNames) > 0 && functionNames[0] != "" {
functionFlag = true
- aggregationFunctions(counter, filtrCount, myAggVals, columnsMap, reqColNames, functionNames, record)
+ aggregationFunctions(counter, filtrCount, myAggVals, reqColNames, functionNames, string(out))
} else {
// The code below finds the appropriate columns of the row given the
// indices provided in the SQL request and utilizes the map to
// retrieve the correct part of the row.
- myQueryRow, myErr := reader.processColNameIndex(record, reqColNames, columns)
+ myQueryRow, myErr := processColNameIndex(string(out), reqColNames, columns, f)
if myErr != nil {
rowStruct := &Row{
err: myErr,
@@ -255,12 +285,12 @@ func (reader *Input) processSelectReq(reqColNames []string, alias string, whereC
// form of actual names rather than indices.
if len(functionNames) > 0 && functionNames[0] != "" {
functionFlag = true
- aggregationFunctions(counter, filtrCount, myAggVals, columnsMap, reqColNames, functionNames, record)
+ aggregationFunctions(counter, filtrCount, myAggVals, reqColNames, functionNames, string(out))
} else {
// This code prints the appropriate part of the row given the filter
// and select request, if the select request was based on column
// names rather than indices.
- myQueryRow, myErr := reader.processColNameLiteral(record, reqColNames, columns, columnsMap, myFunc)
+ myQueryRow, myErr := processColNameLiteral(string(out), reqColNames, myFunc, f)
if myErr != nil {
rowStruct := &Row{
err: myErr,
@@ -281,75 +311,73 @@ func (reader *Input) processSelectReq(reqColNames []string, alias string, whereC
}
}
-// printAsterix helps to print out the entire row if an asterix is used.
-func (reader *Input) printAsterix(record []string) string {
- return strings.Join(record, reader.options.OutputFieldDelimiter)
-}
-
// processColumnNames is a function which allows for cleaning of column names.
-func (reader *Input) processColumnNames(reqColNames []string, alias string) error {
- for i := 0; i < len(reqColNames); i++ {
- // The code below basically cleans the column name of its alias and other
- // syntax, so that we can extract its pure name.
- reqColNames[i] = cleanCol(reqColNames[i], alias)
+func processColumnNames(reqColNames []string, alias string, f format.Select) error {
+ switch f.Type() {
+ case format.CSV:
+ for i := 0; i < len(reqColNames); i++ {
+ // The code below basically cleans the column name of its alias and other
+ // syntax, so that we can extract its pure name.
+ reqColNames[i] = cleanCol(reqColNames[i], alias)
+ }
+ case format.JSON:
+ // JSON does not have columns, so no cleaning is required.
}
+
return nil
}
// processColNameIndex is the function which creates the row for an index-based
// query.
-func (reader *Input) processColNameIndex(record []string, reqColNames []string, columns []string) (string, error) {
+func processColNameIndex(record string, reqColNames []string, columns []string, f format.Select) (string, error) {
row := make([]string, len(reqColNames))
for i := 0; i < len(reqColNames); i++ {
// COALESCE AND NULLIF do not support index based access.
if reqColNames[0] == "0" {
- return "", ErrInvalidColumnIndex
+ return "", format.ErrInvalidColumnIndex
}
- // Subtract 1 because AWS Indexing is not 0 based, it starts at 1.
mytempindex, err := strconv.Atoi(reqColNames[i])
+ // Only bounds-check the index when Atoi succeeded; conversion
+ // failures fall through to the ErrMissingHeaders check below.
+ if err == nil && mytempindex > len(columns) {
+ return "", format.ErrInvalidColumnIndex
+ }
+
if err != nil {
return "", ErrMissingHeaders
}
- mytempindex = mytempindex - 1
- if mytempindex > len(columns) {
- return "", ErrInvalidColumnIndex
- }
- row[i] = record[mytempindex]
+ // Subtract 1 because AWS indexing is 1-based, so SQL index 1 maps to the key "_0".
+ row[i] = jsonValue("_"+strconv.Itoa(mytempindex-1), record)
}
- rowStr := strings.Join(row, reader.options.OutputFieldDelimiter)
- if len(rowStr) > 1000000 {
+ rowStr := strings.Join(row, f.OutputFieldDelimiter())
+ if len(rowStr) > MaxCharsPerRecord {
return "", ErrOverMaxRecordSize
}
+
return rowStr, nil
}
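The index-to-key mapping above can be shown on its own; the record literal below is illustrative, while the subtract-1 rule and "_<index>" keys come straight from the code above.

package main

import (
	"fmt"
	"strconv"

	"github.com/tidwall/gjson"
)

func main() {
	record := `{"_0":"alpha","_1":"beta"}`
	sqlIndex := 2 // a query such as SELECT _2 (AWS indexing starts at 1)
	key := "_" + strconv.Itoa(sqlIndex-1)
	fmt.Println(gjson.Get(record, key).String()) // beta
}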
// processColNameLiteral is the function which creates the row for a name-based
// query.
-func (reader *Input) processColNameLiteral(record []string, reqColNames []string, columns []string, columnsMap map[string]int, myFunc *SelectFuncs) (string, error) {
+func processColNameLiteral(record string, reqColNames []string, myFunc *SelectFuncs, f format.Select) (string, error) {
row := make([]string, len(reqColNames))
for i := 0; i < len(reqColNames); i++ {
// this is the case to deal with COALESCE.
if reqColNames[i] == "" && isValidFunc(myFunc.index, i) {
- row[i] = evaluateFuncExpr(myFunc.funcExpr[i], "", record, columnsMap)
+ row[i] = evaluateFuncExpr(myFunc.funcExpr[i], "", record)
continue
}
- myTempIndex, notFound := columnsMap[trimQuotes(reqColNames[i])]
- if !notFound {
- return "", ErrMissingHeaders
- }
- row[i] = record[myTempIndex]
+ row[i] = jsonValue(reqColNames[i], record)
}
- rowStr := strings.Join(row, reader.options.OutputFieldDelimiter)
- if len(rowStr) > 1000000 {
+ rowStr := strings.Join(row, f.OutputFieldDelimiter())
+ if len(rowStr) > MaxCharsPerRecord {
return "", ErrOverMaxRecordSize
}
return rowStr, nil
}
-// aggregationFunctions performs the actual aggregation methods on the
-// given row, it uses an array defined for the main parsing function
-// to keep track of values.
-func aggregationFunctions(counter int, filtrCount int, myAggVals []float64, columnsMap map[string]int, storeReqCols []string, storeFunctions []string, record []string) error {
+// aggregationFunctions performs the actual aggregation methods on the given
+// row; it uses an array defined in the main parsing function to keep track
+// of values.
+func aggregationFunctions(counter int, filtrCount int, myAggVals []float64, storeReqCols []string, storeFunctions []string, record string) error {
for i := 0; i < len(storeFunctions); i++ {
if storeFunctions[i] == "" {
i++
@@ -358,15 +386,13 @@ func aggregationFunctions(counter int, filtrCount int, myAggVals []float64, colu
} else {
// If column names are provided as an index, use this if statement instead of the else.
var convAggFloat float64
- if representsInt(storeReqCols[i]) {
- colIndex, _ := strconv.Atoi(storeReqCols[i])
- // colIndex is 1-based
- convAggFloat, _ = strconv.ParseFloat(record[colIndex-1], 64)
+ if format.IsInt(storeReqCols[i]) {
+ myIndex, _ := strconv.Atoi(storeReqCols[i])
+ convAggFloat, _ = strconv.ParseFloat(jsonValue("_"+strconv.Itoa(myIndex), record), 64)
} else {
// case that the columns are in the form of named columns rather than indices.
- convAggFloat, _ = strconv.ParseFloat(record[columnsMap[trimQuotes(storeReqCols[i])]], 64)
-
+ convAggFloat, _ = strconv.ParseFloat(jsonValue(storeReqCols[i], record), 64)
}
// This if statement is for calculating the min.
if storeFunctions[i] == "min" {
@@ -404,3 +430,25 @@ func aggregationFunctions(counter int, filtrCount int, myAggVals []float64, colu
}
return nil
}
+
+// convertToSlice converts the map[string]interface{} record into a []string, ordered by column position.
+func convertToSlice(columnsMap map[string]int, record map[string]interface{}, marshalledRecord string) []string {
+ var result []string
+ type kv struct {
+ Key string
+ Value int
+ }
+ var ss []kv
+ for k, v := range columnsMap {
+ ss = append(ss, kv{k, v})
+ }
+ sort.Slice(ss, func(i, j int) bool {
+ return ss[i].Value < ss[j].Value
+ })
+ for _, kv := range ss {
+ if _, ok := record[kv.Key]; ok {
+ result = append(result, jsonValue(kv.Key, marshalledRecord))
+ }
+ }
+ return result
+}
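The ordering step convertToSlice relies on, recovering positional order from a map keyed by column index, can be sketched independently; the map contents here are illustrative.

package main

import (
	"fmt"
	"sort"
)

func main() {
	columnsMap := map[string]int{"year": 1, "name": 0, "id": 2}
	keys := make([]string, 0, len(columnsMap))
	for k := range columnsMap {
		keys = append(keys, k)
	}
	// Order keys by their column position, as convertToSlice does,
	// since Go map iteration order is not deterministic.
	sort.Slice(keys, func(i, j int) bool {
		return columnsMap[keys[i]] < columnsMap[keys[j]]
	})
	fmt.Println(keys) // [name year id]
}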
diff --git a/pkg/s3select/select_test.go b/pkg/s3select/select_test.go
index 92281664f..f0e8d3bba 100644
--- a/pkg/s3select/select_test.go
+++ b/pkg/s3select/select_test.go
@@ -17,10 +17,11 @@
package s3select
import (
- "bytes"
"fmt"
"reflect"
"testing"
+
+ "github.com/minio/minio/pkg/s3select/format"
)
// Unit Test for the checkForDuplicates function.
@@ -35,6 +36,7 @@ func TestCheckForDuplicates(t *testing.T) {
{[]string{"name", "id", "last_name", "last_name"}, make(map[string]int), make(map[string]bool), make(map[string]int), ErrAmbiguousFieldName},
{[]string{"name", "id", "last_name", "another_name"}, make(map[string]int), make(map[string]bool), make(map[string]int), nil},
}
+
for _, table := range tables {
err := checkForDuplicates(table.myReq, table.myHeaders, table.myDup, table.myLow)
if err != table.myErr {
@@ -43,106 +45,14 @@ func TestCheckForDuplicates(t *testing.T) {
}
}
-// Test for the function which processes columnnames to make sure that they are
-// compatible with spaces.
-func TestMyProcessing(t *testing.T) {
- options := &Options{
- HasHeader: false,
- RecordDelimiter: "\n",
- FieldDelimiter: ",",
- Comments: "",
- Name: "S3Object", // Default table name for all objects
- ReadFrom: bytes.NewReader([]byte("Here , is, a, string + \n + random,random,stuff,stuff ")),
- Compressed: "",
- Expression: "",
- OutputFieldDelimiter: ",",
- StreamSize: 20,
- }
- s3s, err := NewInput(options)
- if err != nil {
- t.Error(err)
- }
- tables := []struct {
- myReq []string
- myHeaders map[string]int
- myDup map[string]bool
- myLow map[string]int
- myOpts *Options
- input *Input
- length int
- testOutput string
- myErr error
- }{
- {[]string{"name", "id", "last_name", "CAST"}, make(map[string]int), make(map[string]bool), make(map[string]int), options, s3s, 4, "CAST", nil},
- {[]string{"name", "id", "last_name", "another_name"}, make(map[string]int), make(map[string]bool), make(map[string]int), options, s3s, 4, "another_name", nil},
- {[]string{"name", "id", "last_name", "another_name"}, make(map[string]int), make(map[string]bool), make(map[string]int), options, s3s, 4, "another_name", nil},
- {[]string{"name", "id", "random_name", "fame_name", "another_col"}, make(map[string]int), make(map[string]bool), make(map[string]int), options, s3s, 5, "fame_name", nil},
- }
- for _, table := range tables {
- err = checkForDuplicates(table.myReq, table.myHeaders, table.myDup, table.myLow)
- if err != table.myErr {
- t.Error()
- }
- if len(table.myReq) != table.length {
- t.Errorf("UnexpectedError")
- }
- if table.myReq[3] != table.testOutput {
- t.Error()
- }
- }
-}
-
-// TestMyRowIndexResults is a unit test which makes sure that the rows that are
-// being printed are appropriate to the query being requested.
-func TestMyRowIndexResults(t *testing.T) {
- options := &Options{
- HasHeader: false,
- RecordDelimiter: "\n",
- FieldDelimiter: ",",
- Comments: "",
- Name: "S3Object", // Default table name for all objects
- ReadFrom: bytes.NewReader([]byte("Here , is, a, string + \n + random,random,stuff,stuff ")),
- Compressed: "",
- Expression: "",
- OutputFieldDelimiter: ",",
- StreamSize: 20,
- }
- s3s, err := NewInput(options)
- if err != nil {
- t.Error(err)
- }
- tables := []struct {
- myReq []string
- myHeaders map[string]int
- myDup map[string]bool
- myLow map[string]int
- myOpts *Options
- input *Input
- myRecord []string
- myTarget string
- myAsterix string
- columns []string
- err error
- }{
- {[]string{"1", "2"}, make(map[string]int), make(map[string]bool), make(map[string]int), options, s3s, []string{"target", "random", "hello", "stuff"}, "target,random", "target,random,hello,stuff", []string{"1", "2", "3", "4"}, nil},
- {[]string{"2", "3", "4"}, make(map[string]int), make(map[string]bool), make(map[string]int), options, s3s, []string{"random", "hullo", "thing", "stuff"}, "hullo,thing,stuff", "random,hullo,thing,stuff", []string{"1", "2", "3", "4"}, nil},
- {[]string{"3", "2"}, make(map[string]int), make(map[string]bool), make(map[string]int), options, s3s, []string{"random", "hullo", "thing", "stuff"}, "thing,hullo", "random,hullo,thing,stuff", []string{"1", "2", "3", "4"}, nil},
- {[]string{"11", "1"}, make(map[string]int), make(map[string]bool), make(map[string]int), options, s3s, []string{"random", "hullo", "thing", "stuff"}, "", "random,hullo,thing,stuff", []string{"1", "2", "3", "4"}, ErrInvalidColumnIndex},
- }
- for _, table := range tables {
- checkForDuplicates(table.columns, table.myHeaders, table.myDup, table.myLow)
- myRow, err := s3s.processColNameIndex(table.myRecord, table.myReq, table.columns)
- if err != table.err {
- t.Error()
- }
- if myRow != table.myTarget {
- t.Error()
- }
- myRow = table.input.printAsterix(table.myRecord)
- if myRow != table.myAsterix {
- t.Error()
+// stringIndex returns the index of a string in a list, or -1 if it is not present.
+func stringIndex(a string, list []string) int {
+ for i, v := range list {
+ if v == a {
+ return i
}
}
+ return -1
}
// TestMyHelperFunctions is a unit test which tests some small helper string
@@ -159,7 +69,7 @@ func TestMyHelperFunctions(t *testing.T) {
{"test3", []string{"test1", "test2", "test3", "test4", "test5"}, 2, true},
}
for _, table := range tables {
- if stringInSlice(table.myReq, table.myList) != table.expected {
+ if format.StringInSlice(table.myReq, table.myList) != table.expected {
t.Error()
}
if stringIndex(table.myReq, table.myList) != table.myIndex {
@@ -233,82 +143,6 @@ func TestMyConversion(t *testing.T) {
}
}
-// Unit Tests for Parser.
-func TestMyParser(t *testing.T) {
- tables := []struct {
- myQuery string
- err error
- reqCols []string
- alias string
- myLimit int
- aggFuncs []string
- header []string
- }{
- {"SELECT * FROM S3OBJECT", nil, []string{"*"}, "S3OBJECT", 0, make([]string, 1), []string{"name1", "name2", "name3", "name4"}},
- {"SELECT * FROM S3OBJECT AS A", nil, []string{"*"}, "A", 0, make([]string, 1), []string{"name1", "name2", "name3", "name4"}},
- {"SELECT col_name FROM S3OBJECT AS A", nil, []string{"col_name"}, "A", 0, make([]string, 1), []string{"col_name", "name2", "name3", "name4"}},
- {"SELECT col_name,col_other FROM S3OBJECT AS A LIMIT 5", nil, []string{"col_name", "col_other"}, "A", 5, make([]string, 2), []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT col_name,col_other FROM S3OBJECT AS A WHERE col_name = 'Name' LIMIT 5", nil, []string{"col_name", "col_other"}, "A", 5, make([]string, 2), []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT col_name,col_other FROM S3OBJECT AS A WHERE col_name = 'Name LIMIT 5", ErrLexerInvalidChar, nil, "", 0, nil, []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT count(*) FROM S3OBJECT AS A WHERE col_name = 'Name' LIMIT 5", nil, []string{"*"}, "A", 5, []string{"count"}, []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT sum(col_name),sum(col_other) FROM S3OBJECT AS A WHERE col_name = 'Name' LIMIT 5", nil, []string{"col_name", "col_other"}, "A", 5, []string{"sum", "sum"}, []string{"col_name", "col_other"}},
- {"SELECT A.col_name FROM S3OBJECT AS A", nil, []string{"col_name"}, "A", 0, make([]string, 1), []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT A.`col name` FROM S3OBJECT AS A", nil, []string{"col_name"}, "A", 0, make([]string, 1), []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT A._col_name FROM S3OBJECT AS A", nil, []string{"col_name"}, "A", 0, make([]string, 1), []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT A._col_name FROM S3OBJECT AS A WHERE randomname > 5", ErrMissingHeaders, nil, "", 0, nil, []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT A._col_name FROM S3OBJECT AS A WHERE A._11 > 5", ErrInvalidColumnIndex, nil, "", 0, nil, []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT COALESCE(col_name,col_other) FROM S3OBJECT AS A WHERE A._3 > 5", nil, []string{""}, "A", 0, []string{""}, []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT COALESCE(col_name,col_other),COALESCE(col_name,col_other) FROM S3OBJECT AS A WHERE A._3 > 5", nil, []string{"", ""}, "A", 0, []string{"", ""}, []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT COALESCE(col_name,col_other) ,col_name , COALESCE(col_name,col_other) FROM S3OBJECT AS A WHERE col_name > 5", nil, []string{"", "col_name", ""}, "A", 0, []string{"", "", ""}, []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT NULLIF(col_name,col_other) ,col_name , COALESCE(col_name,col_other) FROM S3OBJECT AS A WHERE col_name > 5", nil, []string{"", "col_name", ""}, "A", 0, []string{"", "", ""}, []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT NULLIF(col_name,col_other) FROM S3OBJECT AS A WHERE col_name > 5", nil, []string{""}, "A", 0, []string{""}, []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT NULLIF(randomname,col_other) FROM S3OBJECT AS A WHERE col_name > 5", ErrMissingHeaders, nil, "", 0, nil, []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT col_name FROM S3OBJECT AS A WHERE COALESCE(random,5) > 5", ErrMissingHeaders, nil, "", 0, nil, []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT col_name FROM S3OBJECT AS A WHERE NULLIF(random,5) > 5", ErrMissingHeaders, nil, "", 0, nil, []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT col_name FROM S3OBJECT AS A WHERE LOWER(col_name) BETWEEN 5 AND 7", nil, []string{"col_name"}, "A", 0, []string{""}, []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT UPPER(col_name) FROM S3OBJECT AS A WHERE LOWER(col_name) BETWEEN 5 AND 7", nil, []string{""}, "A", 0, []string{""}, []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT UPPER(*) FROM S3OBJECT AS A WHERE LOWER(col_name) BETWEEN 5 AND 7", ErrParseUnsupportedCallWithStar, nil, "", 0, nil, []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT NULLIF(col_name,col_name) FROM S3OBJECT AS A WHERE NULLIF(LOWER(col_name),col_name) BETWEEN 5 AND 7", nil, []string{""}, "A", 0, []string{""}, []string{"col_name", "col_other", "name3", "name4"}},
- {"SELECT COALESCE(col_name,col_name) FROM S3OBJECT AS A WHERE NULLIF(LOWER(col_name),col_name) BETWEEN 5 AND 7", nil, []string{""}, "A", 0, []string{""}, []string{"col_name", "col_other", "name3", "name4"}},
- }
- for _, table := range tables {
- options := &Options{
- HasHeader: false,
- RecordDelimiter: "\n",
- FieldDelimiter: ",",
- Comments: "",
- Name: "S3Object", // Default table name for all objects
- ReadFrom: bytes.NewReader([]byte("name1,name2,name3,name4" + "\n" + "5,is,a,string" + "\n" + "random,random,stuff,stuff")),
- Compressed: "",
- Expression: "",
- OutputFieldDelimiter: ",",
- StreamSize: 20,
- HeaderOpt: true,
- }
- s3s, err := NewInput(options)
- if err != nil {
- t.Error(err)
- }
- s3s.header = table.header
- reqCols, alias, myLimit, _, aggFunctionNames, _, err := s3s.ParseSelect(table.myQuery)
- if table.err != err {
- t.Error()
- }
- if !reflect.DeepEqual(reqCols, table.reqCols) {
- t.Error()
- }
- if alias != table.alias {
- t.Error()
- }
- if myLimit != int64(table.myLimit) {
- t.Error()
- }
- if !reflect.DeepEqual(table.aggFuncs, aggFunctionNames) {
- t.Error()
- }
- }
-}
-
// Unit tests for the main function that performs aggregation.
func TestMyAggregationFunc(t *testing.T) {
columnsMap := make(map[string]int)
@@ -321,21 +155,22 @@ func TestMyAggregationFunc(t *testing.T) {
columnsMap map[string]int
storeReqCols []string
storeFunctions []string
- record []string
+ record string
err error
expectedVal float64
}{
- {10, 5, []float64{10}, columnsMap, []string{"Col1"}, []string{"count"}, []string{"1", "2"}, nil, 11},
- {10, 5, []float64{10}, columnsMap, []string{"Col1"}, []string{"min"}, []string{"1", "2"}, nil, 1},
- {10, 5, []float64{10}, columnsMap, []string{"Col1"}, []string{"max"}, []string{"1", "2"}, nil, 10},
- {10, 5, []float64{10}, columnsMap, []string{"Col1"}, []string{"sum"}, []string{"1", "2"}, nil, 11},
- {1, 1, []float64{10}, columnsMap, []string{"Col1"}, []string{"avg"}, []string{"1", "2"}, nil, 5.500},
- {10, 5, []float64{0.000}, columnsMap, []string{"Col1"}, []string{"random"}, []string{"1", "2"}, ErrParseNonUnaryAgregateFunctionCall, 0},
- {0, 5, []float64{0}, columnsMap, []string{"0"}, []string{"count"}, []string{"1", "2"}, nil, 1},
- {10, 5, []float64{10}, columnsMap, []string{"1"}, []string{"min"}, []string{"1", "12"}, nil, 1},
+ {10, 5, []float64{10, 11, 12, 13, 14}, columnsMap, []string{"Col1"}, []string{"count"}, "{\"Col1\":\"1\",\"Col2\":\"2\"}", nil, 11},
+ {10, 5, []float64{10}, columnsMap, []string{"Col1"}, []string{"min"}, "{\"Col1\":\"1\",\"Col2\":\"2\"}", nil, 1},
+ {10, 5, []float64{10}, columnsMap, []string{"Col1"}, []string{"max"}, "{\"Col1\":\"1\",\"Col2\":\"2\"}", nil, 10},
+ {10, 5, []float64{10}, columnsMap, []string{"Col1"}, []string{"sum"}, "{\"Col1\":\"1\",\"Col2\":\"2\"}", nil, 11},
+ {1, 1, []float64{10}, columnsMap, []string{"Col1"}, []string{"avg"}, "{\"Col1\":\"1\",\"Col2\":\"2\"}", nil, 5.500},
+ {10, 5, []float64{0.0000}, columnsMap, []string{"Col1"}, []string{"random"}, "{\"Col1\":\"1\",\"Col2\":\"2\"}", ErrParseNonUnaryAgregateFunctionCall, 0},
+ {0, 5, []float64{0}, columnsMap, []string{"0"}, []string{"count"}, "{\"Col1\":\"1\",\"Col2\":\"2\"}", nil, 1},
+ {10, 5, []float64{10}, columnsMap, []string{"1"}, []string{"min"}, "{\"_1\":\"1\",\"_2\":\"2\"}", nil, 1},
}
+
for _, table := range tables {
- err := aggregationFunctions(table.counter, table.filtrCount, table.myAggVals, table.columnsMap, table.storeReqCols, table.storeFunctions, table.record)
+ err := aggregationFunctions(table.counter, table.filtrCount, table.myAggVals, table.storeReqCols, table.storeFunctions, table.record)
if table.err != err {
t.Error()
}
@@ -346,156 +181,6 @@ func TestMyAggregationFunc(t *testing.T) {
}
}
-// Unit Tests for the function which converts a float array to string.
-func TestToStringAgg(t *testing.T) {
- options := &Options{
- HasHeader: false,
- RecordDelimiter: "\n",
- FieldDelimiter: ",",
- Comments: "",
- Name: "S3Object", // Default table name for all objects
- ReadFrom: bytes.NewReader([]byte("Here , is, a, string + \n + random,random,stuff,stuff ")),
- Compressed: "",
- Expression: "",
- OutputFieldDelimiter: ",",
- StreamSize: 20,
- HeaderOpt: true,
- }
- s3s, err := NewInput(options)
- if err != nil {
- t.Error(err)
- }
- tables := []struct {
- myAggVal []float64
- expected string
- }{
- {[]float64{10, 11, 12, 13, 14}, "10,11,12,13,14"},
- {[]float64{10, 11.3, 12, 13, 14}, "10,11.300000,12,13,14"},
- {[]float64{10.235, 11.3, 12, 13, 14}, "10.235000,11.300000,12,13,14"},
- {[]float64{10.235, 11.3, 12.123, 13.456, 14.789}, "10.235000,11.300000,12.123000,13.456000,14.789000"},
- {[]float64{10}, "10"},
- }
- for _, table := range tables {
- val := s3s.aggFuncToStr(table.myAggVal)
- if val != table.expected {
- t.Error()
- }
- }
-}
-
-// TestMyRowColLiteralResults is a unit test which makes sure that the rows that
-// are being printed are appropriate to the query being requested.
-func TestMyRowColLiteralResults(t *testing.T) {
- options := &Options{
- HasHeader: false,
- RecordDelimiter: "\n",
- FieldDelimiter: ",",
- Comments: "",
- Name: "S3Object", // Default table name for all objects
- ReadFrom: bytes.NewReader([]byte("Here , is, a, string + \n + random,random,stuff,stuff ")),
- Compressed: "",
- Expression: "",
- OutputFieldDelimiter: ",",
- StreamSize: 20,
- HeaderOpt: true,
- }
- s3s, err := NewInput(options)
- if err != nil {
- t.Error(err)
- }
- tables := []struct {
- myReq []string
- myHeaders map[string]int
- myDup map[string]bool
- myLow map[string]int
- myOpts *Options
- tempList []string
- input *Input
- myRecord []string
- myTarget string
- columns []string
- err error
- }{
- {[]string{"draft", "year"}, make(map[string]int), make(map[string]bool), make(map[string]int), options, []string{"draft", "year"}, s3s, []string{"target", "random", "hello", "stuff"}, "target,random", []string{"draft", "year", "random", "another"}, nil},
- {[]string{"year", "draft"}, make(map[string]int), make(map[string]bool), make(map[string]int), options, []string{"year", "draft"}, s3s, []string{"draft", "2012", "thing", "stuff"}, "2012,draft", []string{"draft", "year", "random", "another"}, nil},
- {[]string{"yearrandomstuff", "draft"}, make(map[string]int), make(map[string]bool), make(map[string]int), options, []string{"yearrandomstuff", "draft"}, s3s, []string{"draft", "2012", "thing", "stuff"}, "", []string{"draft", "year", "random", "another"}, ErrMissingHeaders},
- {[]string{"draft", "randomstuff"}, make(map[string]int), make(map[string]bool), make(map[string]int), options, []string{"yearrandomstuff", "draft"}, s3s, []string{"draft", "2012", "thing", "stuff"}, "", []string{"draft", "year", "random", "another"}, ErrMissingHeaders},
- }
- for _, table := range tables {
- checkForDuplicates(table.columns, table.myHeaders, table.myDup, table.myLow)
- myRow, err := table.input.processColNameLiteral(table.myRecord, table.myReq, table.tempList, table.myHeaders, nil)
- if err != table.err {
- t.Error()
- }
- if myRow != table.myTarget {
- t.Error()
- }
- }
-}
-
-// TestMyWhereEval is a function which provides unit tests for the function
-// which evaluates the where clause.
-func TestMyWhereEval(t *testing.T) {
- columnsMap := make(map[string]int)
- columnsMap["Col1"] = 0
- columnsMap["Col2"] = 1
- tables := []struct {
- myQuery string
- record []string
- err error
- expected bool
- header []string
- }{
- {"SELECT * FROM S3OBJECT", []string{"record_1,record_2,record_3,record_4"}, nil, true, []string{"Col1", "Col2"}},
- {"SELECT * FROM S3OBJECT WHERE Col1 < -1", []string{"0", "1"}, nil, false, []string{"Col1", "Col2"}},
- {"SELECT * FROM S3OBJECT WHERE Col1 < -1 OR Col2 > 15", []string{"151", "12"}, nil, false, []string{"Col1", "Col2"}},
- {"SELECT * FROM S3OBJECT WHERE Col1 > -1 AND Col2 > 15", []string{"151", "12"}, nil, false, []string{"Col1", "Col2"}},
- {"SELECT * FROM S3OBJECT WHERE Col1 > 1.00", []string{"151.0000", "12"}, nil, true, []string{"Col1", "Col2"}},
- {"SELECT * FROM S3OBJECT WHERE Col1 > 100", []string{"random", "12"}, nil, false, []string{"Col1", "Col2"}},
- {"SELECT * FROM S3OBJECT WHERE Col1 BETWEEN 100 AND 0", []string{"151", "12"}, nil, false, []string{"Col1", "Col2"}},
- {"SELECT * FROM S3OBJECT WHERE Col1 BETWEEN 100.0 AND 0.0", []string{"151", "12"}, nil, false, []string{"Col1", "Col2"}},
- {"SELECT * FROM S3OBJECT AS A WHERE A.1 BETWEEN 160 AND 150", []string{"151", "12"}, nil, true, []string{"Col1", "Col2"}},
- {"SELECT * FROM S3OBJECT AS A WHERE A._1 BETWEEN 160 AND 0", []string{"151", "12"}, nil, true, []string{"Col1", "Col2"}},
- {"SELECT * FROM S3OBJECT AS A WHERE A._1 BETWEEN 0 AND 160", []string{"151", "12"}, nil, true, []string{"Col1", "Col2"}},
- {"SELECT * FROM S3OBJECT A._1 LIKE 'r%'", []string{"record_1,record_2,record_3,record_4"}, nil, true, []string{"Col1", "Col2"}},
- {"SELECT s._2 FROM S3Object s WHERE s._2 = 'Steven'", []string{"record_1", "Steven", "Steven", "record_4"}, nil, true, []string{"Col1", "Col2"}},
- {"SELECT * FROM S3OBJECT AS A WHERE Col1 BETWEEN 0 AND 160", []string{"151", "12"}, nil, true, []string{"Col1", "Col2"}},
- {"SELECT * FROM S3OBJECT AS A WHERE Col1 BETWEEN 160 AND 0", []string{"151", "12"}, nil, true, []string{"Col1", "Col2"}},
- {"SELECT * FROM S3OBJECT AS A WHERE UPPER(Col1) BETWEEN 160 AND 0", []string{"151", "12"}, nil, true, []string{"Col1", "Col2"}},
- {"SELECT * FROM S3OBJECT AS A WHERE UPPER(Col1) = 'RANDOM'", []string{"random", "12"}, nil, true, []string{"Col1", "Col2"}},
- {"SELECT * FROM S3OBJECT AS A WHERE LOWER(UPPER(Col1) = 'random'", []string{"random", "12"}, nil, true, []string{"Col1", "Col2"}},
- }
- for _, table := range tables {
- options := &Options{
- HasHeader: false,
- RecordDelimiter: "\n",
- FieldDelimiter: ",",
- Comments: "",
- Name: "S3Object", // Default table name for all objects
- ReadFrom: bytes.NewReader([]byte("name1,name2,name3,name4" + "\n" + "5,is,a,string" + "\n" + "random,random,stuff,stuff")),
- Compressed: "",
- Expression: "",
- OutputFieldDelimiter: ",",
- StreamSize: 20,
- HeaderOpt: true,
- }
- s3s, err := NewInput(options)
- s3s.header = table.header
-
- if err != nil {
- t.Error(err)
- }
- _, alias, _, whereClause, _, _, _ := s3s.ParseSelect(table.myQuery)
- myVal, err := matchesMyWhereClause(table.record, columnsMap, alias, whereClause)
- if table.err != err {
- t.Error()
- }
- if myVal != table.expected {
- t.Error()
- }
- }
-}
-
// TestMyStringComparator is a unit test which ensures that the appropriate
// values are being compared for strings.
func TestMyStringComparator(t *testing.T) {
@@ -594,231 +279,13 @@ func TestMySizeFunction(t *testing.T) {
{[]string{"test1", "test2", "test3", "test4", "test5"}, 30},
}
for _, table := range tables {
- if processSize(table.myRecord) != table.expected {
+ if format.ProcessSize(table.myRecord) != table.expected {
t.Error()
}
}
}
-// TestInterpreter is a function which provides unit testing for the main
-// interpreter function.
-func TestInterpreter(t *testing.T) {
- tables := []struct {
- myQuery string
- myChan chan *Row
- err error
- header []string
- }{
- {"Select random from S3OBJECT", make(chan *Row), ErrMissingHeaders, []string{"name1", "name2", "name3", "name4"}},
- {"Select * from S3OBJECT as A WHERE name2 > 5.00", make(chan *Row), nil, []string{"name1", "name2", "name3", "name4"}},
- {"Select * from S3OBJECT", make(chan *Row), nil, []string{"name1", "name2", "name3", "name4"}},
- {"Select A_1 from S3OBJECT as A", make(chan *Row), nil, []string{"1", "2", "3", "4"}},
- {"Select count(*) from S3OBJECT", make(chan *Row), nil, []string{"name1", "name2", "name3", "name4"}},
- {"Select * from S3OBJECT WHERE name1 > 5.00", make(chan *Row), nil, []string{"name1", "name2", "name3", "name4"}},
- }
- for _, table := range tables {
- options := &Options{
- HasHeader: false,
- RecordDelimiter: "\n",
- FieldDelimiter: ",",
- Comments: "",
- Name: "S3Object", // Default table name for all objects
- ReadFrom: bytes.NewReader([]byte("name1,name2,name3,name4" + "\n" + "5,is,a,string" + "\n" + "random,random,stuff,stuff")),
- Compressed: "",
- Expression: "",
- OutputFieldDelimiter: ",",
- StreamSize: 20,
- HeaderOpt: true,
- }
- s3s, err := NewInput(options)
- if err != nil {
- t.Error(err)
- }
- s3s.header = table.header
- reqCols, alias, myLimit, whereClause, aggFunctionNames, _, err := s3s.ParseSelect(table.myQuery)
- if err != table.err {
- t.Fatal()
- }
- if err == nil {
- go s3s.processSelectReq(reqCols, alias, whereClause, myLimit, aggFunctionNames, table.myChan, nil)
- select {
- case row, ok := <-table.myChan:
- if ok && len(row.record) > 0 {
- } else if ok && row.err != nil {
- if row.err != table.err {
- t.Error()
- }
- close(table.myChan)
- } else if !ok {
- }
- }
- }
- }
-}
-
-// TestMyXMLFunction is a function that provides unit testing for the XML
-// creating function.
-func TestMyXMLFunction(t *testing.T) {
- options := &Options{
- HasHeader: false,
- RecordDelimiter: "\n",
- FieldDelimiter: ",",
- Comments: "",
- Name: "S3Object", // Default table name for all objects
- ReadFrom: bytes.NewReader([]byte("name1,name2,name3,name4" + "\n" + "5,is,a,string" + "\n" + "random,random,stuff,stuff")),
- Compressed: "",
- Expression: "",
- OutputFieldDelimiter: ",",
- StreamSize: 20,
- HeaderOpt: true,
- }
- s3s, err := NewInput(options)
- if err != nil {
- t.Error(err)
- }
- tables := []struct {
- expectedStat int
- expectedProgress int
- }{
- {150, 156},
- }
- for _, table := range tables {
- myVal, _ := s3s.createStatXML()
- myOtherVal, _ := s3s.createProgressXML()
- if len(myVal) != table.expectedStat {
- t.Error()
- }
- if len(myOtherVal) != table.expectedProgress {
- fmt.Println(len(myOtherVal))
- t.Error()
- }
- }
-}
-
-// TestMyProtocolFunction is a function which provides unit testing for several
-// of the functions which write the binary protocol.
-func TestMyProtocolFunction(t *testing.T) {
- options := &Options{
- HasHeader: false,
- RecordDelimiter: "\n",
- FieldDelimiter: ",",
- Comments: "",
- Name: "S3Object", // Default table name for all objects
- ReadFrom: bytes.NewReader([]byte("name1,name2,name3,name4" + "\n" + "5,is,a,string" + "\n" + "random,random,stuff,stuff")),
- Compressed: "",
- Expression: "",
- OutputFieldDelimiter: ",",
- StreamSize: 20,
- HeaderOpt: true,
- }
- s3s, err := NewInput(options)
- if err != nil {
- t.Error(err)
- }
- tables := []struct {
- payloadMsg string
- expectedRecord int
- expectedEnd int
- }{
- {"random payload", 115, 56},
- }
- for _, table := range tables {
- var currentMessage = &bytes.Buffer{}
- if len(s3s.writeRecordMessage(table.payloadMsg, currentMessage).Bytes()) != table.expectedRecord {
- t.Error()
- }
- currentMessage.Reset()
- if len(s3s.writeEndMessage(currentMessage).Bytes()) != table.expectedEnd {
- t.Error()
- }
- currentMessage.Reset()
- if len(s3s.writeContinuationMessage(currentMessage).Bytes()) != 57 {
- t.Error()
- }
- currentMessage.Reset()
- }
-}
-
-// TestMyInfoProtocolFunctions is a function which provides unit testing for the
-// stat and progress messages of the protocols.
-func TestMyInfoProtocolFunctions(t *testing.T) {
- options := &Options{
- HasHeader: true,
- RecordDelimiter: "\n",
- FieldDelimiter: ",",
- Comments: "",
- Name: "S3Object", // Default table name for all objects
- ReadFrom: bytes.NewReader([]byte("name1,name2,name3,name4" + "\n" + "5,is,a,string" + "\n" + "random,random,stuff,stuff")),
- Compressed: "",
- Expression: "",
- OutputFieldDelimiter: ",",
- StreamSize: 20,
- }
- s3s, err := NewInput(options)
- if err != nil {
- t.Error(err)
- }
- myVal, _ := s3s.createStatXML()
- myOtherVal, _ := s3s.createProgressXML()
-
- tables := []struct {
- payloadStatMsg string
- payloadProgressMsg string
- expectedStat int
- expectedProgress int
- }{
- {myVal, myOtherVal, 233, 243},
- }
- for _, table := range tables {
- var currBuf = &bytes.Buffer{}
- if len(s3s.writeStatMessage(table.payloadStatMsg, currBuf).Bytes()) != table.expectedStat {
- t.Error()
- }
- currBuf.Reset()
- if len(s3s.writeProgressMessage(table.payloadProgressMsg, currBuf).Bytes()) != table.expectedProgress {
- t.Error()
- }
- }
-}
-
-// TestMyErrorProtocolFunctions is a function which provides unit testing for
-// the error message type of protocol.
-func TestMyErrorProtocolFunctions(t *testing.T) {
- options := &Options{
- HasHeader: false,
- RecordDelimiter: "\n",
- FieldDelimiter: ",",
- Comments: "",
- Name: "S3Object", // Default table name for all objects
- ReadFrom: bytes.NewReader([]byte("name1,name2,name3,name4" + "\n" + "5,is,a,string" + "\n" + "random,random,stuff,stuff")),
- Compressed: "",
- Expression: "",
- OutputFieldDelimiter: ",",
- StreamSize: 20,
- HeaderOpt: true,
- }
- s3s, err := NewInput(options)
- if err != nil {
- t.Error(err)
- }
- tables := []struct {
- err error
- expectedError int
- }{
- {ErrInvalidCast, 248},
- {ErrTruncatedInput, 200},
- {ErrUnsupportedSyntax, 114},
- {ErrCSVParsingError, 157},
- }
- for _, table := range tables {
- var currentMessage = &bytes.Buffer{}
- if len(s3s.writeErrorMessage(table.err, currentMessage).Bytes()) != table.expectedError {
- t.Error()
- }
-
- }
-}
func TestMatch(t *testing.T) {
testCases := []struct {
pattern string
@@ -1004,51 +471,6 @@ func TestMatch(t *testing.T) {
}
}
-// TestMyValids is a unit test which ensures that the appropriate values are
-// being returned from the isValid... functions.
-func TestMyValids(t *testing.T) {
-
- tables := []struct {
- myQuery string
- indexList []int
- myIndex int
- myValIndex bool
- header []string
- err error
- }{
- {"SELECT UPPER(NULLIF(draft_year,random_name))", []int{3, 5, 6, 7, 8, 9}, 3, true, []string{"draft_year", "random_name"}, nil},
- {"SELECT UPPER(NULLIF(draft_year,xandom_name))", []int{3, 5, 6, 7, 8, 9}, 3, true, []string{"draft_year", "random_name"}, ErrMissingHeaders},
- }
- for _, table := range tables {
- options := &Options{
- HasHeader: false,
- RecordDelimiter: "\n",
- FieldDelimiter: ",",
- Comments: "",
- Name: "S3Object", // Default table name for all objects
- ReadFrom: bytes.NewReader([]byte("name1,name2,name3,name4" + "\n" + "5,is,a,string" + "\n" + "random,random,stuff,stuff")),
- Compressed: "",
- Expression: "",
- OutputFieldDelimiter: ",",
- StreamSize: 20,
- HeaderOpt: true,
- }
- s3s, err := NewInput(options)
- if err != nil {
- t.Error(err)
- }
- s3s.header = table.header
- _, _, _, _, _, _, err = s3s.ParseSelect(table.myQuery)
- if err != table.err {
- t.Fatal()
- }
- myVal := isValidFunc(table.indexList, table.myIndex)
- if myVal != table.myValIndex {
- t.Error()
- }
- }
-}
-
// TestMyFuncProcessing is a unit test which ensures that the appropriate values are
// being returned from the Processing... functions.
func TestMyFuncProcessing(t *testing.T) {
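The hunks above switch test records from `[]string` slices to JSON-encoded strings such as `{"Col1":"1","Col2":"2"}`, which is what motivates vendoring json-iterator below. As a minimal sketch (not part of the patch; it uses only the `jsoniter.Get` adapter that appears later in this diff), a single column can be pulled out of such a record like this:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// A record in the new JSON format used by the tests above.
	record := `{"Col1":"1","Col2":"2"}`

	// Lazily extract one field without decoding the whole record.
	col1 := jsoniter.Get([]byte(record), "Col1").ToString()
	fmt.Println(col1) // Output: 1
}
```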
diff --git a/vendor/github.com/json-iterator/go/Gopkg.lock b/vendor/github.com/json-iterator/go/Gopkg.lock
new file mode 100644
index 000000000..c8a9fbb38
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/Gopkg.lock
@@ -0,0 +1,21 @@
+# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
+
+
+[[projects]]
+ name = "github.com/modern-go/concurrent"
+ packages = ["."]
+ revision = "e0a39a4cb4216ea8db28e22a69f4ec25610d513a"
+ version = "1.0.0"
+
+[[projects]]
+ name = "github.com/modern-go/reflect2"
+ packages = ["."]
+ revision = "4b7aa43c6742a2c18fdef89dd197aaae7dac7ccd"
+ version = "1.0.1"
+
+[solve-meta]
+ analyzer-name = "dep"
+ analyzer-version = 1
+ inputs-digest = "ea54a775e5a354cb015502d2e7aa4b74230fc77e894f34a838b268c25ec8eeb8"
+ solver-name = "gps-cdcl"
+ solver-version = 1
diff --git a/vendor/github.com/json-iterator/go/Gopkg.toml b/vendor/github.com/json-iterator/go/Gopkg.toml
new file mode 100644
index 000000000..313a0f887
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/Gopkg.toml
@@ -0,0 +1,26 @@
+# Gopkg.toml example
+#
+# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
+# for detailed Gopkg.toml documentation.
+#
+# required = ["github.com/user/thing/cmd/thing"]
+# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
+#
+# [[constraint]]
+# name = "github.com/user/project"
+# version = "1.0.0"
+#
+# [[constraint]]
+# name = "github.com/user/project2"
+# branch = "dev"
+# source = "github.com/myfork/project2"
+#
+# [[override]]
+# name = "github.com/x/y"
+# version = "2.4.0"
+
+ignored = ["github.com/davecgh/go-spew*","github.com/google/gofuzz*","github.com/stretchr/testify*"]
+
+[[constraint]]
+ name = "github.com/modern-go/reflect2"
+ version = "1.0.1"
diff --git a/vendor/github.com/json-iterator/go/LICENSE b/vendor/github.com/json-iterator/go/LICENSE
new file mode 100644
index 000000000..2cf4f5ab2
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2016 json-iterator
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/json-iterator/go/README.md b/vendor/github.com/json-iterator/go/README.md
new file mode 100644
index 000000000..54d5afe95
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/README.md
@@ -0,0 +1,91 @@
+[Sourcegraph](https://sourcegraph.com/github.com/json-iterator/go?badge)
+[GoDoc](http://godoc.org/github.com/json-iterator/go)
+[Build Status](https://travis-ci.org/json-iterator/go)
+[codecov](https://codecov.io/gh/json-iterator/go)
+[Go Report Card](https://goreportcard.com/report/github.com/json-iterator/go)
+[License](https://raw.githubusercontent.com/json-iterator/go/master/LICENSE)
+[Gitter](https://gitter.im/json-iterator/Lobby)
+
+A high-performance, 100% compatible drop-in replacement for "encoding/json".
+
+You can also work with Thrift the same way as JSON using [thrift-iterator](https://github.com/thrift-iterator/go).
+
+```
+Go developers, please join us: DiDi Chuxing Platform Technology Department, taowen@didichuxing.com
+```
+
+# Benchmark
+
+Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/github.com/json-iterator/go-benchmark/benchmark_medium_payload_test.go
+
+Raw Result (easyjson requires static code generation)
+
+| | ns/op | allocation bytes | allocation times |
+| --- | --- | --- | --- |
+| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
+| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
+| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
+| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
+| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
+| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
+
+Always benchmark with your own workload.
+The result depends heavily on the data input.
+
+# Usage
+
+100% compatibility with the standard library.
+
+Replace
+
+```go
+import "encoding/json"
+json.Marshal(&data)
+```
+
+with
+
+```go
+import "github.com/json-iterator/go"
+
+var json = jsoniter.ConfigCompatibleWithStandardLibrary
+json.Marshal(&data)
+```
+
+Replace
+
+```go
+import "encoding/json"
+json.Unmarshal(input, &data)
+```
+
+with
+
+```go
+import "github.com/json-iterator/go"
+
+var json = jsoniter.ConfigCompatibleWithStandardLibrary
+json.Unmarshal(input, &data)
+```
+
+[More documentation](http://jsoniter.com/migrate-from-go-std.html)
+
+# How to get
+
+```
+go get github.com/json-iterator/go
+```
+
+# Contributions Welcome!
+
+Contributors
+
+* [thockin](https://github.com/thockin)
+* [mattn](https://github.com/mattn)
+* [cch123](https://github.com/cch123)
+* [Oleg Shaldybin](https://github.com/olegshaldybin)
+* [Jason Toffaletti](https://github.com/toffaletti)
+
+Report an issue or open a pull request, or email taowen@gmail.com, or [chat on Gitter](https://gitter.im/json-iterator/Lobby).
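
A minimal round-trip sketch combining the two replacements shown above (the `Point` type is illustrative, not part of the library):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// json is a drop-in stand-in for the standard library, as above.
var json = jsoniter.ConfigCompatibleWithStandardLibrary

// Point is an illustrative type.
type Point struct {
	X, Y int
}

func main() {
	data, err := json.Marshal(&Point{X: 1, Y: 2})
	if err != nil {
		panic(err)
	}
	var p Point
	if err := json.Unmarshal(data, &p); err != nil {
		panic(err)
	}
	fmt.Println(string(data), p) // {"X":1,"Y":2} {1 2}
}
```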
diff --git a/vendor/github.com/json-iterator/go/adapter.go b/vendor/github.com/json-iterator/go/adapter.go
new file mode 100644
index 000000000..e674d0f39
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/adapter.go
@@ -0,0 +1,150 @@
+package jsoniter
+
+import (
+ "bytes"
+ "io"
+)
+
+// RawMessage makes it possible to replace encoding/json with jsoniter in code that uses json.RawMessage.
+type RawMessage []byte
+
+// Unmarshal adapts to the encoding/json Unmarshal API.
+//
+// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v.
+// Refer to https://godoc.org/encoding/json#Unmarshal for more information
+func Unmarshal(data []byte, v interface{}) error {
+ return ConfigDefault.Unmarshal(data, v)
+}
+
+// UnmarshalFromString is a convenience method that reads from a string instead of a []byte.
+func UnmarshalFromString(str string, v interface{}) error {
+ return ConfigDefault.UnmarshalFromString(str, v)
+}
+
+// Get is a quick method to get a value from a deeply nested JSON structure.
+func Get(data []byte, path ...interface{}) Any {
+ return ConfigDefault.Get(data, path...)
+}
+
+// Marshal adapts to the encoding/json Marshal API.
+//
+// Marshal returns the JSON encoding of v.
+// Refer to https://godoc.org/encoding/json#Marshal for more information
+func Marshal(v interface{}) ([]byte, error) {
+ return ConfigDefault.Marshal(v)
+}
+
+// MarshalIndent same as json.MarshalIndent. Prefix is not supported.
+func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
+ return ConfigDefault.MarshalIndent(v, prefix, indent)
+}
+
+// MarshalToString is a convenience method that returns a string instead of a []byte.
+func MarshalToString(v interface{}) (string, error) {
+ return ConfigDefault.MarshalToString(v)
+}
+
+// NewDecoder adapts to the encoding/json NewDecoder API.
+//
+// NewDecoder returns a new decoder that reads from r.
+//
+// A jsoniter Decoder is returned instead of an encoding/json Decoder.
+// Refer to https://godoc.org/encoding/json#NewDecoder for more information
+func NewDecoder(reader io.Reader) *Decoder {
+ return ConfigDefault.NewDecoder(reader)
+}
+
+// Decoder reads and decodes JSON values from an input stream.
+// Decoder provides the same API as the encoding/json Decoder (Token() and UseNumber() are in progress).
+type Decoder struct {
+ iter *Iterator
+}
+
+// Decode decodes the next JSON value from the stream into obj.
+func (adapter *Decoder) Decode(obj interface{}) error {
+ if adapter.iter.head == adapter.iter.tail && adapter.iter.reader != nil {
+ if !adapter.iter.loadMore() {
+ return io.EOF
+ }
+ }
+ adapter.iter.ReadVal(obj)
+ err := adapter.iter.Error
+ if err == io.EOF {
+ return nil
+ }
+ return adapter.iter.Error
+}
+
+// More reports whether there is another element in the current stream.
+func (adapter *Decoder) More() bool {
+ iter := adapter.iter
+ if iter.Error != nil {
+ return false
+ }
+ c := iter.nextToken()
+ if c == 0 {
+ return false
+ }
+ iter.unreadByte()
+ return c != ']' && c != '}'
+}
+
+// Buffered returns a reader over the data remaining in the buffer.
+func (adapter *Decoder) Buffered() io.Reader {
+ remaining := adapter.iter.buf[adapter.iter.head:adapter.iter.tail]
+ return bytes.NewReader(remaining)
+}
+
+// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
+// Number instead of as a float64.
+func (adapter *Decoder) UseNumber() {
+ cfg := adapter.iter.cfg.configBeforeFrozen
+ cfg.UseNumber = true
+ adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
+}
+
+// DisallowUnknownFields causes the Decoder to return an error when the destination
+// is a struct and the input contains object keys which do not match any
+// non-ignored, exported fields in the destination.
+func (adapter *Decoder) DisallowUnknownFields() {
+ cfg := adapter.iter.cfg.configBeforeFrozen
+ cfg.DisallowUnknownFields = true
+ adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
+}
+
+// NewEncoder same as json.NewEncoder
+func NewEncoder(writer io.Writer) *Encoder {
+ return ConfigDefault.NewEncoder(writer)
+}
+
+// Encoder same as json.Encoder
+type Encoder struct {
+ stream *Stream
+}
+
+// Encode encodes val as JSON and writes it to the underlying io.Writer.
+func (adapter *Encoder) Encode(val interface{}) error {
+ adapter.stream.WriteVal(val)
+ adapter.stream.WriteRaw("\n")
+ adapter.stream.Flush()
+ return adapter.stream.Error
+}
+
+// SetIndent sets the indentation step. Prefix is not supported.
+func (adapter *Encoder) SetIndent(prefix, indent string) {
+ config := adapter.stream.cfg.configBeforeFrozen
+ config.IndentionStep = len(indent)
+ adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
+}
+
+// SetEscapeHTML toggles HTML escaping, which is on by default; set to false to disable it.
+func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) {
+ config := adapter.stream.cfg.configBeforeFrozen
+ config.EscapeHTML = escapeHTML
+ adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
+}
+
+// Valid reports whether data is a valid JSON encoding.
+func Valid(data []byte) bool {
+ return ConfigDefault.Valid(data)
+}
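
For illustration, a short sketch (using only the adapter APIs defined above) of the stream Decoder and Encoder working together on concatenated JSON values:

```go
package main

import (
	"bytes"
	"fmt"
	"os"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Two concatenated JSON values read as a stream.
	input := bytes.NewReader([]byte(`{"a":1}{"a":2}`))
	dec := jsoniter.NewDecoder(input)
	enc := jsoniter.NewEncoder(os.Stdout)
	enc.SetIndent("", "  ") // the prefix argument is ignored, as noted above

	for dec.More() {
		var v map[string]int
		if err := dec.Decode(&v); err != nil {
			fmt.Fprintln(os.Stderr, err)
			return
		}
		if err := enc.Encode(v); err != nil {
			fmt.Fprintln(os.Stderr, err)
			return
		}
	}
}
```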
diff --git a/vendor/github.com/json-iterator/go/any.go b/vendor/github.com/json-iterator/go/any.go
new file mode 100644
index 000000000..daecfed61
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any.go
@@ -0,0 +1,321 @@
+package jsoniter
+
+import (
+ "errors"
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "io"
+ "reflect"
+ "strconv"
+ "unsafe"
+)
+
+// Any is a generic object representation.
+// The lazy JSON implementation holds []byte and parses it lazily.
+type Any interface {
+ LastError() error
+ ValueType() ValueType
+ MustBeValid() Any
+ ToBool() bool
+ ToInt() int
+ ToInt32() int32
+ ToInt64() int64
+ ToUint() uint
+ ToUint32() uint32
+ ToUint64() uint64
+ ToFloat32() float32
+ ToFloat64() float64
+ ToString() string
+ ToVal(val interface{})
+ Get(path ...interface{}) Any
+ Size() int
+ Keys() []string
+ GetInterface() interface{}
+ WriteTo(stream *Stream)
+}
+
+type baseAny struct{}
+
+func (any *baseAny) Get(path ...interface{}) Any {
+ return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)}
+}
+
+func (any *baseAny) Size() int {
+ return 0
+}
+
+func (any *baseAny) Keys() []string {
+ return []string{}
+}
+
+func (any *baseAny) ToVal(obj interface{}) {
+ panic("not implemented")
+}
+
+// WrapInt32 turns an int32 into the Any interface.
+func WrapInt32(val int32) Any {
+ return &int32Any{baseAny{}, val}
+}
+
+// WrapInt64 turns an int64 into the Any interface.
+func WrapInt64(val int64) Any {
+ return &int64Any{baseAny{}, val}
+}
+
+// WrapUint32 turns a uint32 into the Any interface.
+func WrapUint32(val uint32) Any {
+ return &uint32Any{baseAny{}, val}
+}
+
+// WrapUint64 turns a uint64 into the Any interface.
+func WrapUint64(val uint64) Any {
+ return &uint64Any{baseAny{}, val}
+}
+
+// WrapFloat64 turns a float64 into the Any interface.
+func WrapFloat64(val float64) Any {
+ return &floatAny{baseAny{}, val}
+}
+
+// WrapString turns a string into the Any interface.
+func WrapString(val string) Any {
+ return &stringAny{baseAny{}, val}
+}
+
+// Wrap turns a Go object into the Any interface.
+func Wrap(val interface{}) Any {
+ if val == nil {
+ return &nilAny{}
+ }
+ asAny, isAny := val.(Any)
+ if isAny {
+ return asAny
+ }
+ typ := reflect2.TypeOf(val)
+ switch typ.Kind() {
+ case reflect.Slice:
+ return wrapArray(val)
+ case reflect.Struct:
+ return wrapStruct(val)
+ case reflect.Map:
+ return wrapMap(val)
+ case reflect.String:
+ return WrapString(val.(string))
+ case reflect.Int:
+ if strconv.IntSize == 32 {
+ return WrapInt32(int32(val.(int)))
+ }
+ return WrapInt64(int64(val.(int)))
+ case reflect.Int8:
+ return WrapInt32(int32(val.(int8)))
+ case reflect.Int16:
+ return WrapInt32(int32(val.(int16)))
+ case reflect.Int32:
+ return WrapInt32(val.(int32))
+ case reflect.Int64:
+ return WrapInt64(val.(int64))
+ case reflect.Uint:
+ if strconv.IntSize == 32 {
+ return WrapUint32(uint32(val.(uint)))
+ }
+ return WrapUint64(uint64(val.(uint)))
+ case reflect.Uintptr:
+ if ptrSize == 32 {
+ return WrapUint32(uint32(val.(uintptr)))
+ }
+ return WrapUint64(uint64(val.(uintptr)))
+ case reflect.Uint8:
+ return WrapUint32(uint32(val.(uint8)))
+ case reflect.Uint16:
+ return WrapUint32(uint32(val.(uint16)))
+ case reflect.Uint32:
+ return WrapUint32(uint32(val.(uint32)))
+ case reflect.Uint64:
+ return WrapUint64(val.(uint64))
+ case reflect.Float32:
+ return WrapFloat64(float64(val.(float32)))
+ case reflect.Float64:
+ return WrapFloat64(val.(float64))
+ case reflect.Bool:
+ if val.(bool) {
+ return &trueAny{}
+ }
+ return &falseAny{}
+ }
+ return &invalidAny{baseAny{}, fmt.Errorf("unsupported type: %v", typ)}
+}
+
+// ReadAny reads the next JSON element as an Any object. It is a better json.RawMessage.
+func (iter *Iterator) ReadAny() Any {
+ return iter.readAny()
+}
+
+func (iter *Iterator) readAny() Any {
+ c := iter.nextToken()
+ switch c {
+ case '"':
+ iter.unreadByte()
+ return &stringAny{baseAny{}, iter.ReadString()}
+ case 'n':
+ iter.skipThreeBytes('u', 'l', 'l') // null
+ return &nilAny{}
+ case 't':
+ iter.skipThreeBytes('r', 'u', 'e') // true
+ return &trueAny{}
+ case 'f':
+ iter.skipFourBytes('a', 'l', 's', 'e') // false
+ return &falseAny{}
+ case '{':
+ return iter.readObjectAny()
+ case '[':
+ return iter.readArrayAny()
+ case '-':
+ return iter.readNumberAny(false)
+ case 0:
+ return &invalidAny{baseAny{}, errors.New("input is empty")}
+ default:
+ return iter.readNumberAny(true)
+ }
+}
+
+func (iter *Iterator) readNumberAny(positive bool) Any {
+ iter.startCapture(iter.head - 1)
+ iter.skipNumber()
+ lazyBuf := iter.stopCapture()
+ return &numberLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
+}
+
+func (iter *Iterator) readObjectAny() Any {
+ iter.startCapture(iter.head - 1)
+ iter.skipObject()
+ lazyBuf := iter.stopCapture()
+ return &objectLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
+}
+
+func (iter *Iterator) readArrayAny() Any {
+ iter.startCapture(iter.head - 1)
+ iter.skipArray()
+ lazyBuf := iter.stopCapture()
+ return &arrayLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
+}
+
+func locateObjectField(iter *Iterator, target string) []byte {
+ var found []byte
+ iter.ReadObjectCB(func(iter *Iterator, field string) bool {
+ if field == target {
+ found = iter.SkipAndReturnBytes()
+ return false
+ }
+ iter.Skip()
+ return true
+ })
+ return found
+}
+
+func locateArrayElement(iter *Iterator, target int) []byte {
+ var found []byte
+ n := 0
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ if n == target {
+ found = iter.SkipAndReturnBytes()
+ return false
+ }
+ iter.Skip()
+ n++
+ return true
+ })
+ return found
+}
+
+func locatePath(iter *Iterator, path []interface{}) Any {
+ for i, pathKeyObj := range path {
+ switch pathKey := pathKeyObj.(type) {
+ case string:
+ valueBytes := locateObjectField(iter, pathKey)
+ if valueBytes == nil {
+ return newInvalidAny(path[i:])
+ }
+ iter.ResetBytes(valueBytes)
+ case int:
+ valueBytes := locateArrayElement(iter, pathKey)
+ if valueBytes == nil {
+ return newInvalidAny(path[i:])
+ }
+ iter.ResetBytes(valueBytes)
+ case int32:
+ if '*' == pathKey {
+ return iter.readAny().Get(path[i:]...)
+ }
+ return newInvalidAny(path[i:])
+ default:
+ return newInvalidAny(path[i:])
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ return &invalidAny{baseAny{}, iter.Error}
+ }
+ return iter.readAny()
+}
+
+var anyType = reflect2.TypeOfPtr((*Any)(nil)).Elem()
+
+func createDecoderOfAny(ctx *ctx, typ reflect2.Type) ValDecoder {
+ if typ == anyType {
+ return &directAnyCodec{}
+ }
+ if typ.Implements(anyType) {
+ return &anyCodec{
+ valType: typ,
+ }
+ }
+ return nil
+}
+
+func createEncoderOfAny(ctx *ctx, typ reflect2.Type) ValEncoder {
+ if typ == anyType {
+ return &directAnyCodec{}
+ }
+ if typ.Implements(anyType) {
+ return &anyCodec{
+ valType: typ,
+ }
+ }
+ return nil
+}
+
+type anyCodec struct {
+ valType reflect2.Type
+}
+
+func (codec *anyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ panic("not implemented")
+}
+
+func (codec *anyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ obj := codec.valType.UnsafeIndirect(ptr)
+ any := obj.(Any)
+ any.WriteTo(stream)
+}
+
+func (codec *anyCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ obj := codec.valType.UnsafeIndirect(ptr)
+ any := obj.(Any)
+ return any.Size() == 0
+}
+
+type directAnyCodec struct {
+}
+
+func (codec *directAnyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ *(*Any)(ptr) = iter.readAny()
+}
+
+func (codec *directAnyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ any := *(*Any)(ptr)
+ any.WriteTo(stream)
+}
+
+func (codec *directAnyCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ any := *(*Any)(ptr)
+ return any.Size() == 0
+}
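
To illustrate the path lookup implemented by `locatePath` above: string keys select object fields, ints select array elements, and `'*'` (a rune, hence the `int32` case) maps the remaining path over every element. A minimal sketch:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"users":[{"name":"ann"},{"name":"bob"}]}`)

	// String keys select object fields, ints select array elements.
	fmt.Println(jsoniter.Get(data, "users", 1, "name").ToString()) // bob

	// '*' is a rune (int32), matching the int32 case in locatePath,
	// and maps the rest of the path over every element.
	fmt.Println(jsoniter.Get(data, "users", '*', "name").ToString()) // ["ann","bob"]
}
```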
diff --git a/vendor/github.com/json-iterator/go/any_array.go b/vendor/github.com/json-iterator/go/any_array.go
new file mode 100644
index 000000000..0449e9aa4
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_array.go
@@ -0,0 +1,278 @@
+package jsoniter
+
+import (
+ "reflect"
+ "unsafe"
+)
+
+type arrayLazyAny struct {
+ baseAny
+ cfg *frozenConfig
+ buf []byte
+ err error
+}
+
+func (any *arrayLazyAny) ValueType() ValueType {
+ return ArrayValue
+}
+
+func (any *arrayLazyAny) MustBeValid() Any {
+ return any
+}
+
+func (any *arrayLazyAny) LastError() error {
+ return any.err
+}
+
+func (any *arrayLazyAny) ToBool() bool {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ return iter.ReadArray()
+}
+
+func (any *arrayLazyAny) ToInt() int {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToInt32() int32 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToInt64() int64 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToUint() uint {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToUint32() uint32 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToUint64() uint64 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToFloat32() float32 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToFloat64() float64 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToString() string {
+ return *(*string)(unsafe.Pointer(&any.buf))
+}
+
+func (any *arrayLazyAny) ToVal(val interface{}) {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadVal(val)
+}
+
+func (any *arrayLazyAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case int:
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ valueBytes := locateArrayElement(iter, firstPath)
+ if valueBytes == nil {
+ return newInvalidAny(path)
+ }
+ iter.ResetBytes(valueBytes)
+ return locatePath(iter, path[1:])
+ case int32:
+ if '*' == firstPath {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ arr := make([]Any, 0)
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ found := iter.readAny().Get(path[1:]...)
+ if found.ValueType() != InvalidValue {
+ arr = append(arr, found)
+ }
+ return true
+ })
+ return wrapArray(arr)
+ }
+ return newInvalidAny(path)
+ default:
+ return newInvalidAny(path)
+ }
+}
+
+func (any *arrayLazyAny) Size() int {
+ size := 0
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ size++
+ iter.Skip()
+ return true
+ })
+ return size
+}
+
+func (any *arrayLazyAny) WriteTo(stream *Stream) {
+ stream.Write(any.buf)
+}
+
+func (any *arrayLazyAny) GetInterface() interface{} {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ return iter.Read()
+}
+
+type arrayAny struct {
+ baseAny
+ val reflect.Value
+}
+
+func wrapArray(val interface{}) *arrayAny {
+ return &arrayAny{baseAny{}, reflect.ValueOf(val)}
+}
+
+func (any *arrayAny) ValueType() ValueType {
+ return ArrayValue
+}
+
+func (any *arrayAny) MustBeValid() Any {
+ return any
+}
+
+func (any *arrayAny) LastError() error {
+ return nil
+}
+
+func (any *arrayAny) ToBool() bool {
+ return any.val.Len() != 0
+}
+
+func (any *arrayAny) ToInt() int {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToInt32() int32 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToInt64() int64 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToUint() uint {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToUint32() uint32 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToUint64() uint64 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToFloat32() float32 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToFloat64() float64 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToString() string {
+ str, _ := MarshalToString(any.val.Interface())
+ return str
+}
+
+func (any *arrayAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case int:
+ if firstPath < 0 || firstPath >= any.val.Len() {
+ return newInvalidAny(path)
+ }
+ return Wrap(any.val.Index(firstPath).Interface())
+ case int32:
+ if '*' == firstPath {
+ mappedAll := make([]Any, 0)
+ for i := 0; i < any.val.Len(); i++ {
+ mapped := Wrap(any.val.Index(i).Interface()).Get(path[1:]...)
+ if mapped.ValueType() != InvalidValue {
+ mappedAll = append(mappedAll, mapped)
+ }
+ }
+ return wrapArray(mappedAll)
+ }
+ return newInvalidAny(path)
+ default:
+ return newInvalidAny(path)
+ }
+}
+
+func (any *arrayAny) Size() int {
+ return any.val.Len()
+}
+
+func (any *arrayAny) WriteTo(stream *Stream) {
+ stream.WriteVal(any.val)
+}
+
+func (any *arrayAny) GetInterface() interface{} {
+ return any.val.Interface()
+}
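
The conversions above give arrays boolean and numeric truthiness based on emptiness. A small sketch using `Wrap` from `any.go`:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	empty := jsoniter.Wrap([]int{})
	full := jsoniter.Wrap([]int{1, 2, 3})

	// Per the conversions above: an array is "true" iff it is
	// non-empty, and its numeric conversions are 0 or 1 accordingly.
	fmt.Println(empty.ToBool(), empty.ToInt()) // false 0
	fmt.Println(full.ToBool(), full.ToInt())   // true 1
	fmt.Println(full.Size())                   // 3
}
```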
diff --git a/vendor/github.com/json-iterator/go/any_bool.go b/vendor/github.com/json-iterator/go/any_bool.go
new file mode 100644
index 000000000..9452324af
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_bool.go
@@ -0,0 +1,137 @@
+package jsoniter
+
+type trueAny struct {
+ baseAny
+}
+
+func (any *trueAny) LastError() error {
+ return nil
+}
+
+func (any *trueAny) ToBool() bool {
+ return true
+}
+
+func (any *trueAny) ToInt() int {
+ return 1
+}
+
+func (any *trueAny) ToInt32() int32 {
+ return 1
+}
+
+func (any *trueAny) ToInt64() int64 {
+ return 1
+}
+
+func (any *trueAny) ToUint() uint {
+ return 1
+}
+
+func (any *trueAny) ToUint32() uint32 {
+ return 1
+}
+
+func (any *trueAny) ToUint64() uint64 {
+ return 1
+}
+
+func (any *trueAny) ToFloat32() float32 {
+ return 1
+}
+
+func (any *trueAny) ToFloat64() float64 {
+ return 1
+}
+
+func (any *trueAny) ToString() string {
+ return "true"
+}
+
+func (any *trueAny) WriteTo(stream *Stream) {
+ stream.WriteTrue()
+}
+
+func (any *trueAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *trueAny) GetInterface() interface{} {
+ return true
+}
+
+func (any *trueAny) ValueType() ValueType {
+ return BoolValue
+}
+
+func (any *trueAny) MustBeValid() Any {
+ return any
+}
+
+type falseAny struct {
+ baseAny
+}
+
+func (any *falseAny) LastError() error {
+ return nil
+}
+
+func (any *falseAny) ToBool() bool {
+ return false
+}
+
+func (any *falseAny) ToInt() int {
+ return 0
+}
+
+func (any *falseAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *falseAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *falseAny) ToUint() uint {
+ return 0
+}
+
+func (any *falseAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *falseAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *falseAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *falseAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *falseAny) ToString() string {
+ return "false"
+}
+
+func (any *falseAny) WriteTo(stream *Stream) {
+ stream.WriteFalse()
+}
+
+func (any *falseAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *falseAny) GetInterface() interface{} {
+ return false
+}
+
+func (any *falseAny) ValueType() ValueType {
+ return BoolValue
+}
+
+func (any *falseAny) MustBeValid() Any {
+ return any
+}
diff --git a/vendor/github.com/json-iterator/go/any_float.go b/vendor/github.com/json-iterator/go/any_float.go
new file mode 100644
index 000000000..35fdb0949
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_float.go
@@ -0,0 +1,83 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type floatAny struct {
+ baseAny
+ val float64
+}
+
+func (any *floatAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *floatAny) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *floatAny) MustBeValid() Any {
+ return any
+}
+
+func (any *floatAny) LastError() error {
+ return nil
+}
+
+func (any *floatAny) ToBool() bool {
+ return any.ToFloat64() != 0
+}
+
+func (any *floatAny) ToInt() int {
+ return int(any.val)
+}
+
+func (any *floatAny) ToInt32() int32 {
+ return int32(any.val)
+}
+
+func (any *floatAny) ToInt64() int64 {
+ return int64(any.val)
+}
+
+func (any *floatAny) ToUint() uint {
+ if any.val > 0 {
+ return uint(any.val)
+ }
+ return 0
+}
+
+func (any *floatAny) ToUint32() uint32 {
+ if any.val > 0 {
+ return uint32(any.val)
+ }
+ return 0
+}
+
+func (any *floatAny) ToUint64() uint64 {
+ if any.val > 0 {
+ return uint64(any.val)
+ }
+ return 0
+}
+
+func (any *floatAny) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *floatAny) ToFloat64() float64 {
+ return any.val
+}
+
+func (any *floatAny) ToString() string {
+ return strconv.FormatFloat(any.val, 'E', -1, 64)
+}
+
+func (any *floatAny) WriteTo(stream *Stream) {
+ stream.WriteFloat64(any.val)
+}
+
+func (any *floatAny) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/any_int32.go b/vendor/github.com/json-iterator/go/any_int32.go
new file mode 100644
index 000000000..1b56f3991
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_int32.go
@@ -0,0 +1,74 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type int32Any struct {
+ baseAny
+ val int32
+}
+
+func (any *int32Any) LastError() error {
+ return nil
+}
+
+func (any *int32Any) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *int32Any) MustBeValid() Any {
+ return any
+}
+
+func (any *int32Any) ToBool() bool {
+ return any.val != 0
+}
+
+func (any *int32Any) ToInt() int {
+ return int(any.val)
+}
+
+func (any *int32Any) ToInt32() int32 {
+ return any.val
+}
+
+func (any *int32Any) ToInt64() int64 {
+ return int64(any.val)
+}
+
+func (any *int32Any) ToUint() uint {
+ return uint(any.val)
+}
+
+func (any *int32Any) ToUint32() uint32 {
+ return uint32(any.val)
+}
+
+func (any *int32Any) ToUint64() uint64 {
+ return uint64(any.val)
+}
+
+func (any *int32Any) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *int32Any) ToFloat64() float64 {
+ return float64(any.val)
+}
+
+func (any *int32Any) ToString() string {
+ return strconv.FormatInt(int64(any.val), 10)
+}
+
+func (any *int32Any) WriteTo(stream *Stream) {
+ stream.WriteInt32(any.val)
+}
+
+func (any *int32Any) Parse() *Iterator {
+ return nil
+}
+
+func (any *int32Any) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/any_int64.go b/vendor/github.com/json-iterator/go/any_int64.go
new file mode 100644
index 000000000..c440d72b6
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_int64.go
@@ -0,0 +1,74 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type int64Any struct {
+ baseAny
+ val int64
+}
+
+func (any *int64Any) LastError() error {
+ return nil
+}
+
+func (any *int64Any) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *int64Any) MustBeValid() Any {
+ return any
+}
+
+func (any *int64Any) ToBool() bool {
+ return any.val != 0
+}
+
+func (any *int64Any) ToInt() int {
+ return int(any.val)
+}
+
+func (any *int64Any) ToInt32() int32 {
+ return int32(any.val)
+}
+
+func (any *int64Any) ToInt64() int64 {
+ return any.val
+}
+
+func (any *int64Any) ToUint() uint {
+ return uint(any.val)
+}
+
+func (any *int64Any) ToUint32() uint32 {
+ return uint32(any.val)
+}
+
+func (any *int64Any) ToUint64() uint64 {
+ return uint64(any.val)
+}
+
+func (any *int64Any) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *int64Any) ToFloat64() float64 {
+ return float64(any.val)
+}
+
+func (any *int64Any) ToString() string {
+ return strconv.FormatInt(any.val, 10)
+}
+
+func (any *int64Any) WriteTo(stream *Stream) {
+ stream.WriteInt64(any.val)
+}
+
+func (any *int64Any) Parse() *Iterator {
+ return nil
+}
+
+func (any *int64Any) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/any_invalid.go b/vendor/github.com/json-iterator/go/any_invalid.go
new file mode 100644
index 000000000..1d859eac3
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_invalid.go
@@ -0,0 +1,82 @@
+package jsoniter
+
+import "fmt"
+
+type invalidAny struct {
+ baseAny
+ err error
+}
+
+func newInvalidAny(path []interface{}) *invalidAny {
+ return &invalidAny{baseAny{}, fmt.Errorf("%v not found", path)}
+}
+
+func (any *invalidAny) LastError() error {
+ return any.err
+}
+
+func (any *invalidAny) ValueType() ValueType {
+ return InvalidValue
+}
+
+func (any *invalidAny) MustBeValid() Any {
+ panic(any.err)
+}
+
+func (any *invalidAny) ToBool() bool {
+ return false
+}
+
+func (any *invalidAny) ToInt() int {
+ return 0
+}
+
+func (any *invalidAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *invalidAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *invalidAny) ToUint() uint {
+ return 0
+}
+
+func (any *invalidAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *invalidAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *invalidAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *invalidAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *invalidAny) ToString() string {
+ return ""
+}
+
+func (any *invalidAny) WriteTo(stream *Stream) {
+}
+
+func (any *invalidAny) Get(path ...interface{}) Any {
+ if any.err == nil {
+ return &invalidAny{baseAny{}, fmt.Errorf("get %v from invalid", path)}
+ }
+ return &invalidAny{baseAny{}, fmt.Errorf("%v, get %v from invalid", any.err, path)}
+}
+
+func (any *invalidAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *invalidAny) GetInterface() interface{} {
+ return nil
+}
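
`invalidAny` is what `Get` returns for a missing path: every conversion yields a zero value, `LastError` carries the error, and `MustBeValid` panics with it. A short sketch:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	a := jsoniter.Get([]byte(`{}`), "missing")

	fmt.Println(a.ValueType() == jsoniter.InvalidValue) // true
	fmt.Println(a.ToInt(), a.ToString() == "")          // 0 true
	fmt.Println(a.LastError())                          // [missing] not found

	// a.MustBeValid() would panic with the same error.
}
```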
diff --git a/vendor/github.com/json-iterator/go/any_nil.go b/vendor/github.com/json-iterator/go/any_nil.go
new file mode 100644
index 000000000..d04cb54c1
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_nil.go
@@ -0,0 +1,69 @@
+package jsoniter
+
+type nilAny struct {
+ baseAny
+}
+
+func (any *nilAny) LastError() error {
+ return nil
+}
+
+func (any *nilAny) ValueType() ValueType {
+ return NilValue
+}
+
+func (any *nilAny) MustBeValid() Any {
+ return any
+}
+
+func (any *nilAny) ToBool() bool {
+ return false
+}
+
+func (any *nilAny) ToInt() int {
+ return 0
+}
+
+func (any *nilAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *nilAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *nilAny) ToUint() uint {
+ return 0
+}
+
+func (any *nilAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *nilAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *nilAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *nilAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *nilAny) ToString() string {
+ return ""
+}
+
+func (any *nilAny) WriteTo(stream *Stream) {
+ stream.WriteNil()
+}
+
+func (any *nilAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *nilAny) GetInterface() interface{} {
+ return nil
+}
diff --git a/vendor/github.com/json-iterator/go/any_number.go b/vendor/github.com/json-iterator/go/any_number.go
new file mode 100644
index 000000000..9d1e901a6
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_number.go
@@ -0,0 +1,123 @@
+package jsoniter
+
+import (
+ "io"
+ "unsafe"
+)
+
+type numberLazyAny struct {
+ baseAny
+ cfg *frozenConfig
+ buf []byte
+ err error
+}
+
+func (any *numberLazyAny) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *numberLazyAny) MustBeValid() Any {
+ return any
+}
+
+func (any *numberLazyAny) LastError() error {
+ return any.err
+}
+
+func (any *numberLazyAny) ToBool() bool {
+ return any.ToFloat64() != 0
+}
+
+func (any *numberLazyAny) ToInt() int {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadInt()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToInt32() int32 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadInt32()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToInt64() int64 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadInt64()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToUint() uint {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadUint()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToUint32() uint32 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadUint32()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToUint64() uint64 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadUint64()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToFloat32() float32 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadFloat32()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToFloat64() float64 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadFloat64()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToString() string {
+ return *(*string)(unsafe.Pointer(&any.buf))
+}
+
+func (any *numberLazyAny) WriteTo(stream *Stream) {
+ stream.Write(any.buf)
+}
+
+func (any *numberLazyAny) GetInterface() interface{} {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ return iter.Read()
+}
diff --git a/vendor/github.com/json-iterator/go/any_object.go b/vendor/github.com/json-iterator/go/any_object.go
new file mode 100644
index 000000000..c44ef5c98
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_object.go
@@ -0,0 +1,374 @@
+package jsoniter
+
+import (
+ "reflect"
+ "unsafe"
+)
+
+type objectLazyAny struct {
+ baseAny
+ cfg *frozenConfig
+ buf []byte
+ err error
+}
+
+func (any *objectLazyAny) ValueType() ValueType {
+ return ObjectValue
+}
+
+func (any *objectLazyAny) MustBeValid() Any {
+ return any
+}
+
+func (any *objectLazyAny) LastError() error {
+ return any.err
+}
+
+func (any *objectLazyAny) ToBool() bool {
+ return true
+}
+
+func (any *objectLazyAny) ToInt() int {
+ return 0
+}
+
+func (any *objectLazyAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *objectLazyAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *objectLazyAny) ToUint() uint {
+ return 0
+}
+
+func (any *objectLazyAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *objectLazyAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *objectLazyAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *objectLazyAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *objectLazyAny) ToString() string {
+ return *(*string)(unsafe.Pointer(&any.buf))
+}
+
+func (any *objectLazyAny) ToVal(obj interface{}) {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadVal(obj)
+}
+
+func (any *objectLazyAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case string:
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ valueBytes := locateObjectField(iter, firstPath)
+ if valueBytes == nil {
+ return newInvalidAny(path)
+ }
+ iter.ResetBytes(valueBytes)
+ return locatePath(iter, path[1:])
+ case int32:
+ if '*' == firstPath {
+ mappedAll := map[string]Any{}
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadMapCB(func(iter *Iterator, field string) bool {
+ mapped := locatePath(iter, path[1:])
+ if mapped.ValueType() != InvalidValue {
+ mappedAll[field] = mapped
+ }
+ return true
+ })
+ return wrapMap(mappedAll)
+ }
+ return newInvalidAny(path)
+ default:
+ return newInvalidAny(path)
+ }
+}
+
+func (any *objectLazyAny) Keys() []string {
+ keys := []string{}
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadMapCB(func(iter *Iterator, field string) bool {
+ iter.Skip()
+ keys = append(keys, field)
+ return true
+ })
+ return keys
+}
+
+func (any *objectLazyAny) Size() int {
+ size := 0
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadObjectCB(func(iter *Iterator, field string) bool {
+ iter.Skip()
+ size++
+ return true
+ })
+ return size
+}
+
+func (any *objectLazyAny) WriteTo(stream *Stream) {
+ stream.Write(any.buf)
+}
+
+func (any *objectLazyAny) GetInterface() interface{} {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ return iter.Read()
+}
+
+type objectAny struct {
+ baseAny
+ err error
+ val reflect.Value
+}
+
+func wrapStruct(val interface{}) *objectAny {
+ return &objectAny{baseAny{}, nil, reflect.ValueOf(val)}
+}
+
+func (any *objectAny) ValueType() ValueType {
+ return ObjectValue
+}
+
+func (any *objectAny) MustBeValid() Any {
+ return any
+}
+
+func (any *objectAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *objectAny) LastError() error {
+ return any.err
+}
+
+func (any *objectAny) ToBool() bool {
+ return any.val.NumField() != 0
+}
+
+func (any *objectAny) ToInt() int {
+ return 0
+}
+
+func (any *objectAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *objectAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *objectAny) ToUint() uint {
+ return 0
+}
+
+func (any *objectAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *objectAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *objectAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *objectAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *objectAny) ToString() string {
+ str, err := MarshalToString(any.val.Interface())
+ any.err = err
+ return str
+}
+
+func (any *objectAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case string:
+ field := any.val.FieldByName(firstPath)
+ if !field.IsValid() {
+ return newInvalidAny(path)
+ }
+ return Wrap(field.Interface())
+ case int32:
+ if '*' == firstPath {
+ mappedAll := map[string]Any{}
+ for i := 0; i < any.val.NumField(); i++ {
+ field := any.val.Field(i)
+ if field.CanInterface() {
+ mapped := Wrap(field.Interface()).Get(path[1:]...)
+ if mapped.ValueType() != InvalidValue {
+ mappedAll[any.val.Type().Field(i).Name] = mapped
+ }
+ }
+ }
+ return wrapMap(mappedAll)
+ }
+ return newInvalidAny(path)
+ default:
+ return newInvalidAny(path)
+ }
+}
+
+func (any *objectAny) Keys() []string {
+ keys := make([]string, 0, any.val.NumField())
+ for i := 0; i < any.val.NumField(); i++ {
+ keys = append(keys, any.val.Type().Field(i).Name)
+ }
+ return keys
+}
+
+func (any *objectAny) Size() int {
+ return any.val.NumField()
+}
+
+func (any *objectAny) WriteTo(stream *Stream) {
+ stream.WriteVal(any.val)
+}
+
+func (any *objectAny) GetInterface() interface{} {
+ return any.val.Interface()
+}
+
+type mapAny struct {
+ baseAny
+ err error
+ val reflect.Value
+}
+
+func wrapMap(val interface{}) *mapAny {
+ return &mapAny{baseAny{}, nil, reflect.ValueOf(val)}
+}
+
+func (any *mapAny) ValueType() ValueType {
+ return ObjectValue
+}
+
+func (any *mapAny) MustBeValid() Any {
+ return any
+}
+
+func (any *mapAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *mapAny) LastError() error {
+ return any.err
+}
+
+func (any *mapAny) ToBool() bool {
+ return true
+}
+
+func (any *mapAny) ToInt() int {
+ return 0
+}
+
+func (any *mapAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *mapAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *mapAny) ToUint() uint {
+ return 0
+}
+
+func (any *mapAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *mapAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *mapAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *mapAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *mapAny) ToString() string {
+ str, err := MarshalToString(any.val.Interface())
+ any.err = err
+ return str
+}
+
+func (any *mapAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case int32:
+ if '*' == firstPath {
+ mappedAll := map[string]Any{}
+ for _, key := range any.val.MapKeys() {
+ keyAsStr := key.String()
+ element := Wrap(any.val.MapIndex(key).Interface())
+ mapped := element.Get(path[1:]...)
+ if mapped.ValueType() != InvalidValue {
+ mappedAll[keyAsStr] = mapped
+ }
+ }
+ return wrapMap(mappedAll)
+ }
+ return newInvalidAny(path)
+ default:
+ value := any.val.MapIndex(reflect.ValueOf(firstPath))
+ if !value.IsValid() {
+ return newInvalidAny(path)
+ }
+ return Wrap(value.Interface())
+ }
+}
+
+func (any *mapAny) Keys() []string {
+ keys := make([]string, 0, any.val.Len())
+ for _, key := range any.val.MapKeys() {
+ keys = append(keys, key.String())
+ }
+ return keys
+}
+
+func (any *mapAny) Size() int {
+ return any.val.Len()
+}
+
+func (any *mapAny) WriteTo(stream *Stream) {
+ stream.WriteVal(any.val)
+}
+
+func (any *mapAny) GetInterface() interface{} {
+ return any.val.Interface()
+}
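For orientation, a minimal sketch of how the reflection-backed wrappers above behave once the package is vendored. The `Person` type and values are illustrative only, not part of this diff; the sketch assumes the import path `github.com/json-iterator/go`:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// Person is a hypothetical type used only for this sketch.
type Person struct {
	Name string
	Age  int
}

func main() {
	// A struct wraps to objectAny; Get looks fields up by exported name.
	alice := Person{Name: "Alice", Age: 30}
	fmt.Println(jsoniter.Wrap(alice).Get("Name").ToString()) // Alice

	// A map wraps to mapAny; the '*' wildcard fans out over every
	// element, applies the rest of the path, and keeps valid results.
	people := map[string]Person{"a": alice, "b": {Name: "Bob", Age: 25}}
	ages := jsoniter.Wrap(people).Get('*', "Age")
	fmt.Println(ages.Size()) // 2
}
```

Note that the wildcard must be the rune `'*'` (an int32); the string `"*"` falls through to an ordinary key lookup and yields an invalid Any.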
diff --git a/vendor/github.com/json-iterator/go/any_str.go b/vendor/github.com/json-iterator/go/any_str.go
new file mode 100644
index 000000000..a4b93c78c
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_str.go
@@ -0,0 +1,166 @@
+package jsoniter
+
+import (
+ "fmt"
+ "strconv"
+)
+
+type stringAny struct {
+ baseAny
+ val string
+}
+
+func (any *stringAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)}
+}
+
+func (any *stringAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *stringAny) ValueType() ValueType {
+ return StringValue
+}
+
+func (any *stringAny) MustBeValid() Any {
+ return any
+}
+
+func (any *stringAny) LastError() error {
+ return nil
+}
+
+func (any *stringAny) ToBool() bool {
+ str := any.ToString()
+ if str == "0" {
+ return false
+ }
+ for _, c := range str {
+ switch c {
+ case ' ', '\n', '\r', '\t':
+ default:
+ return true
+ }
+ }
+ return false
+}
+
+func (any *stringAny) ToInt() int {
+	return int(any.ToInt64())
+}
+
+func (any *stringAny) ToInt32() int32 {
+ return int32(any.ToInt64())
+}
+
+func (any *stringAny) ToInt64() int64 {
+ if any.val == "" {
+ return 0
+ }
+
+ flag := 1
+ startPos := 0
+ endPos := 0
+ if any.val[0] == '+' || any.val[0] == '-' {
+ startPos = 1
+ }
+
+ if any.val[0] == '-' {
+ flag = -1
+ }
+
+ for i := startPos; i < len(any.val); i++ {
+ if any.val[i] >= '0' && any.val[i] <= '9' {
+ endPos = i + 1
+ } else {
+ break
+ }
+ }
+	if endPos < startPos {
+		return 0 // a bare sign such as "+" has no digits
+	}
+	parsed, _ := strconv.ParseInt(any.val[startPos:endPos], 10, 64)
+ return int64(flag) * parsed
+}
+
+func (any *stringAny) ToUint() uint {
+ return uint(any.ToUint64())
+}
+
+func (any *stringAny) ToUint32() uint32 {
+ return uint32(any.ToUint64())
+}
+
+func (any *stringAny) ToUint64() uint64 {
+ if any.val == "" {
+ return 0
+ }
+
+ startPos := 0
+ endPos := 0
+
+ if any.val[0] == '-' {
+ return 0
+ }
+ if any.val[0] == '+' {
+ startPos = 1
+ }
+
+ for i := startPos; i < len(any.val); i++ {
+ if any.val[i] >= '0' && any.val[i] <= '9' {
+ endPos = i + 1
+ } else {
+ break
+ }
+ }
+	if endPos < startPos {
+		return 0 // a bare sign such as "+" has no digits
+	}
+	parsed, _ := strconv.ParseUint(any.val[startPos:endPos], 10, 64)
+ return parsed
+}
+
+func (any *stringAny) ToFloat32() float32 {
+ return float32(any.ToFloat64())
+}
+
+func (any *stringAny) ToFloat64() float64 {
+ if len(any.val) == 0 {
+ return 0
+ }
+
+ // first char invalid
+ if any.val[0] != '+' && any.val[0] != '-' && (any.val[0] > '9' || any.val[0] < '0') {
+ return 0
+ }
+
+ // extract valid num expression from string
+ // eg 123true => 123, -12.12xxa => -12.12
+ endPos := 1
+ for i := 1; i < len(any.val); i++ {
+ if any.val[i] == '.' || any.val[i] == 'e' || any.val[i] == 'E' || any.val[i] == '+' || any.val[i] == '-' {
+ endPos = i + 1
+ continue
+ }
+
+ // end position is the first char which is not digit
+ if any.val[i] >= '0' && any.val[i] <= '9' {
+ endPos = i + 1
+ } else {
+ endPos = i
+ break
+ }
+ }
+ parsed, _ := strconv.ParseFloat(any.val[:endPos], 64)
+ return parsed
+}
+
+func (any *stringAny) ToString() string {
+ return any.val
+}
+
+func (any *stringAny) WriteTo(stream *Stream) {
+ stream.WriteString(any.val)
+}
+
+func (any *stringAny) GetInterface() interface{} {
+ return any.val
+}
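The numeric conversions above are deliberately fuzzy: they parse the longest valid prefix and ignore trailing garbage. A short sketch of the resulting behavior (`WrapString` is the package-level constructor for this type; the expected outputs follow from the code above):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// ToInt64 stops at the first non-digit after an optional sign.
	fmt.Println(jsoniter.WrapString("-123.4abc").ToInt64()) // -123
	// ToUint64 maps a leading '-' to 0.
	fmt.Println(jsoniter.WrapString("-12").ToUint64()) // 0
	// ToFloat64 accepts digits, '.', 'e'/'E' and signs, then stops.
	fmt.Println(jsoniter.WrapString("12.4xxa").ToFloat64()) // 12.4
	// ToBool: "0", the empty string and all-whitespace strings are false.
	fmt.Println(jsoniter.WrapString("0").ToBool()) // false
}
```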
diff --git a/vendor/github.com/json-iterator/go/any_uint32.go b/vendor/github.com/json-iterator/go/any_uint32.go
new file mode 100644
index 000000000..656bbd33d
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_uint32.go
@@ -0,0 +1,74 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type uint32Any struct {
+ baseAny
+ val uint32
+}
+
+func (any *uint32Any) LastError() error {
+ return nil
+}
+
+func (any *uint32Any) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *uint32Any) MustBeValid() Any {
+ return any
+}
+
+func (any *uint32Any) ToBool() bool {
+ return any.val != 0
+}
+
+func (any *uint32Any) ToInt() int {
+ return int(any.val)
+}
+
+func (any *uint32Any) ToInt32() int32 {
+ return int32(any.val)
+}
+
+func (any *uint32Any) ToInt64() int64 {
+ return int64(any.val)
+}
+
+func (any *uint32Any) ToUint() uint {
+ return uint(any.val)
+}
+
+func (any *uint32Any) ToUint32() uint32 {
+ return any.val
+}
+
+func (any *uint32Any) ToUint64() uint64 {
+ return uint64(any.val)
+}
+
+func (any *uint32Any) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *uint32Any) ToFloat64() float64 {
+ return float64(any.val)
+}
+
+func (any *uint32Any) ToString() string {
+ return strconv.FormatInt(int64(any.val), 10)
+}
+
+func (any *uint32Any) WriteTo(stream *Stream) {
+ stream.WriteUint32(any.val)
+}
+
+func (any *uint32Any) Parse() *Iterator {
+ return nil
+}
+
+func (any *uint32Any) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/any_uint64.go b/vendor/github.com/json-iterator/go/any_uint64.go
new file mode 100644
index 000000000..7df2fce33
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_uint64.go
@@ -0,0 +1,74 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type uint64Any struct {
+ baseAny
+ val uint64
+}
+
+func (any *uint64Any) LastError() error {
+ return nil
+}
+
+func (any *uint64Any) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *uint64Any) MustBeValid() Any {
+ return any
+}
+
+func (any *uint64Any) ToBool() bool {
+ return any.val != 0
+}
+
+func (any *uint64Any) ToInt() int {
+ return int(any.val)
+}
+
+func (any *uint64Any) ToInt32() int32 {
+ return int32(any.val)
+}
+
+func (any *uint64Any) ToInt64() int64 {
+ return int64(any.val)
+}
+
+func (any *uint64Any) ToUint() uint {
+ return uint(any.val)
+}
+
+func (any *uint64Any) ToUint32() uint32 {
+ return uint32(any.val)
+}
+
+func (any *uint64Any) ToUint64() uint64 {
+ return any.val
+}
+
+func (any *uint64Any) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *uint64Any) ToFloat64() float64 {
+ return float64(any.val)
+}
+
+func (any *uint64Any) ToString() string {
+ return strconv.FormatUint(any.val, 10)
+}
+
+func (any *uint64Any) WriteTo(stream *Stream) {
+ stream.WriteUint64(any.val)
+}
+
+func (any *uint64Any) Parse() *Iterator {
+ return nil
+}
+
+func (any *uint64Any) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/build.sh b/vendor/github.com/json-iterator/go/build.sh
new file mode 100755
index 000000000..b45ef6883
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/build.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -e
+set -x
+
+if [ ! -d /tmp/build-golang/src/github.com/json-iterator ]; then
+ mkdir -p /tmp/build-golang/src/github.com/json-iterator
+ ln -s $PWD /tmp/build-golang/src/github.com/json-iterator/go
+fi
+export GOPATH=/tmp/build-golang
+go get -u github.com/golang/dep/cmd/dep
+cd /tmp/build-golang/src/github.com/json-iterator/go
+exec $GOPATH/bin/dep ensure -update
diff --git a/vendor/github.com/json-iterator/go/config.go b/vendor/github.com/json-iterator/go/config.go
new file mode 100644
index 000000000..8c58fcba5
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/config.go
@@ -0,0 +1,375 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "io"
+ "reflect"
+ "sync"
+ "unsafe"
+
+ "github.com/modern-go/concurrent"
+ "github.com/modern-go/reflect2"
+)
+
+// Config customizes how the API behaves.
+// The API is created from a Config by Froze.
+type Config struct {
+ IndentionStep int
+ MarshalFloatWith6Digits bool
+ EscapeHTML bool
+ SortMapKeys bool
+ UseNumber bool
+ DisallowUnknownFields bool
+ TagKey string
+ OnlyTaggedField bool
+ ValidateJsonRawMessage bool
+ ObjectFieldMustBeSimpleString bool
+ CaseSensitive bool
+}
+
+// API is the public interface of this package,
+// primarily Marshal and Unmarshal.
+type API interface {
+ IteratorPool
+ StreamPool
+ MarshalToString(v interface{}) (string, error)
+ Marshal(v interface{}) ([]byte, error)
+ MarshalIndent(v interface{}, prefix, indent string) ([]byte, error)
+ UnmarshalFromString(str string, v interface{}) error
+ Unmarshal(data []byte, v interface{}) error
+ Get(data []byte, path ...interface{}) Any
+ NewEncoder(writer io.Writer) *Encoder
+ NewDecoder(reader io.Reader) *Decoder
+ Valid(data []byte) bool
+ RegisterExtension(extension Extension)
+ DecoderOf(typ reflect2.Type) ValDecoder
+ EncoderOf(typ reflect2.Type) ValEncoder
+}
+
+// ConfigDefault is the default API
+var ConfigDefault = Config{
+ EscapeHTML: true,
+}.Froze()
+
+// ConfigCompatibleWithStandardLibrary tries to be 100% compatible with standard library behavior
+var ConfigCompatibleWithStandardLibrary = Config{
+ EscapeHTML: true,
+ SortMapKeys: true,
+ ValidateJsonRawMessage: true,
+}.Froze()
+
+// ConfigFastest marshals floats with only 6 digits of precision
+var ConfigFastest = Config{
+ EscapeHTML: false,
+	MarshalFloatWith6Digits:       true, // will lose precision
+ ObjectFieldMustBeSimpleString: true, // do not unescape object field
+}.Froze()
+
+type frozenConfig struct {
+ configBeforeFrozen Config
+ sortMapKeys bool
+ indentionStep int
+ objectFieldMustBeSimpleString bool
+ onlyTaggedField bool
+ disallowUnknownFields bool
+ decoderCache *concurrent.Map
+ encoderCache *concurrent.Map
+ encoderExtension Extension
+ decoderExtension Extension
+ extraExtensions []Extension
+ streamPool *sync.Pool
+ iteratorPool *sync.Pool
+ caseSensitive bool
+}
+
+func (cfg *frozenConfig) initCache() {
+ cfg.decoderCache = concurrent.NewMap()
+ cfg.encoderCache = concurrent.NewMap()
+}
+
+func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) {
+ cfg.decoderCache.Store(cacheKey, decoder)
+}
+
+func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) {
+ cfg.encoderCache.Store(cacheKey, encoder)
+}
+
+func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder {
+ decoder, found := cfg.decoderCache.Load(cacheKey)
+ if found {
+ return decoder.(ValDecoder)
+ }
+ return nil
+}
+
+func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder {
+ encoder, found := cfg.encoderCache.Load(cacheKey)
+ if found {
+ return encoder.(ValEncoder)
+ }
+ return nil
+}
+
+var cfgCache = concurrent.NewMap()
+
+func getFrozenConfigFromCache(cfg Config) *frozenConfig {
+ obj, found := cfgCache.Load(cfg)
+ if found {
+ return obj.(*frozenConfig)
+ }
+ return nil
+}
+
+func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) {
+ cfgCache.Store(cfg, frozenConfig)
+}
+
+// Froze forges an API from the config
+func (cfg Config) Froze() API {
+ api := &frozenConfig{
+ sortMapKeys: cfg.SortMapKeys,
+ indentionStep: cfg.IndentionStep,
+ objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString,
+ onlyTaggedField: cfg.OnlyTaggedField,
+ disallowUnknownFields: cfg.DisallowUnknownFields,
+ caseSensitive: cfg.CaseSensitive,
+ }
+ api.streamPool = &sync.Pool{
+ New: func() interface{} {
+ return NewStream(api, nil, 512)
+ },
+ }
+ api.iteratorPool = &sync.Pool{
+ New: func() interface{} {
+ return NewIterator(api)
+ },
+ }
+ api.initCache()
+ encoderExtension := EncoderExtension{}
+ decoderExtension := DecoderExtension{}
+ if cfg.MarshalFloatWith6Digits {
+ api.marshalFloatWith6Digits(encoderExtension)
+ }
+ if cfg.EscapeHTML {
+ api.escapeHTML(encoderExtension)
+ }
+ if cfg.UseNumber {
+ api.useNumber(decoderExtension)
+ }
+ if cfg.ValidateJsonRawMessage {
+ api.validateJsonRawMessage(encoderExtension)
+ }
+ api.encoderExtension = encoderExtension
+ api.decoderExtension = decoderExtension
+ api.configBeforeFrozen = cfg
+ return api
+}
+
+func (cfg Config) frozeWithCacheReuse(extraExtensions []Extension) *frozenConfig {
+ api := getFrozenConfigFromCache(cfg)
+ if api != nil {
+ return api
+ }
+ api = cfg.Froze().(*frozenConfig)
+ for _, extension := range extraExtensions {
+ api.RegisterExtension(extension)
+ }
+ addFrozenConfigToCache(cfg, api)
+ return api
+}
+
+func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) {
+ encoder := &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) {
+ rawMessage := *(*json.RawMessage)(ptr)
+		iter := cfg.BorrowIterator([]byte(rawMessage))
+		// return the borrowed iterator on every path, not only on success
+		defer cfg.ReturnIterator(iter)
+		iter.Read()
+		if iter.Error != nil && iter.Error != io.EOF {
+			stream.WriteRaw("null")
+		} else {
+			stream.WriteRaw(string(rawMessage))
+		}
+ }, func(ptr unsafe.Pointer) bool {
+ return len(*((*json.RawMessage)(ptr))) == 0
+ }}
+ extension[reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()] = encoder
+ extension[reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()] = encoder
+}
+
+func (cfg *frozenConfig) useNumber(extension DecoderExtension) {
+ extension[reflect2.TypeOfPtr((*interface{})(nil)).Elem()] = &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) {
+ exitingValue := *((*interface{})(ptr))
+ if exitingValue != nil && reflect.TypeOf(exitingValue).Kind() == reflect.Ptr {
+ iter.ReadVal(exitingValue)
+ return
+ }
+ if iter.WhatIsNext() == NumberValue {
+ *((*interface{})(ptr)) = json.Number(iter.readNumberAsString())
+ } else {
+ *((*interface{})(ptr)) = iter.Read()
+ }
+ }}
+}
+
+func (cfg *frozenConfig) getTagKey() string {
+ tagKey := cfg.configBeforeFrozen.TagKey
+ if tagKey == "" {
+ return "json"
+ }
+ return tagKey
+}
+
+func (cfg *frozenConfig) RegisterExtension(extension Extension) {
+ cfg.extraExtensions = append(cfg.extraExtensions, extension)
+ copied := cfg.configBeforeFrozen
+ cfg.configBeforeFrozen = copied
+}
+
+type lossyFloat32Encoder struct {
+}
+
+func (encoder *lossyFloat32Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteFloat32Lossy(*((*float32)(ptr)))
+}
+
+func (encoder *lossyFloat32Encoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*float32)(ptr)) == 0
+}
+
+type lossyFloat64Encoder struct {
+}
+
+func (encoder *lossyFloat64Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteFloat64Lossy(*((*float64)(ptr)))
+}
+
+func (encoder *lossyFloat64Encoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*float64)(ptr)) == 0
+}
+
+// marshalFloatWith6Digits keeps 10**(-6) precision
+// for float variables for better performance.
+func (cfg *frozenConfig) marshalFloatWith6Digits(extension EncoderExtension) {
+ // for better performance
+ extension[reflect2.TypeOfPtr((*float32)(nil)).Elem()] = &lossyFloat32Encoder{}
+ extension[reflect2.TypeOfPtr((*float64)(nil)).Elem()] = &lossyFloat64Encoder{}
+}
+
+type htmlEscapedStringEncoder struct {
+}
+
+func (encoder *htmlEscapedStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ str := *((*string)(ptr))
+ stream.WriteStringWithHTMLEscaped(str)
+}
+
+func (encoder *htmlEscapedStringEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*string)(ptr)) == ""
+}
+
+func (cfg *frozenConfig) escapeHTML(encoderExtension EncoderExtension) {
+ encoderExtension[reflect2.TypeOfPtr((*string)(nil)).Elem()] = &htmlEscapedStringEncoder{}
+}
+
+func (cfg *frozenConfig) cleanDecoders() {
+ typeDecoders = map[string]ValDecoder{}
+ fieldDecoders = map[string]ValDecoder{}
+ *cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
+}
+
+func (cfg *frozenConfig) cleanEncoders() {
+ typeEncoders = map[string]ValEncoder{}
+ fieldEncoders = map[string]ValEncoder{}
+ *cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
+}
+
+func (cfg *frozenConfig) MarshalToString(v interface{}) (string, error) {
+ stream := cfg.BorrowStream(nil)
+ defer cfg.ReturnStream(stream)
+ stream.WriteVal(v)
+ if stream.Error != nil {
+ return "", stream.Error
+ }
+ return string(stream.Buffer()), nil
+}
+
+func (cfg *frozenConfig) Marshal(v interface{}) ([]byte, error) {
+ stream := cfg.BorrowStream(nil)
+ defer cfg.ReturnStream(stream)
+ stream.WriteVal(v)
+ if stream.Error != nil {
+ return nil, stream.Error
+ }
+ result := stream.Buffer()
+ copied := make([]byte, len(result))
+ copy(copied, result)
+ return copied, nil
+}
+
+func (cfg *frozenConfig) MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
+ if prefix != "" {
+ panic("prefix is not supported")
+ }
+ for _, r := range indent {
+ if r != ' ' {
+ panic("indent can only be space")
+ }
+ }
+ newCfg := cfg.configBeforeFrozen
+ newCfg.IndentionStep = len(indent)
+ return newCfg.frozeWithCacheReuse(cfg.extraExtensions).Marshal(v)
+}
+
+func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error {
+ data := []byte(str)
+ iter := cfg.BorrowIterator(data)
+ defer cfg.ReturnIterator(iter)
+ iter.ReadVal(v)
+ c := iter.nextToken()
+ if c == 0 {
+ if iter.Error == io.EOF {
+ return nil
+ }
+ return iter.Error
+ }
+ iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
+ return iter.Error
+}
+
+func (cfg *frozenConfig) Get(data []byte, path ...interface{}) Any {
+ iter := cfg.BorrowIterator(data)
+ defer cfg.ReturnIterator(iter)
+ return locatePath(iter, path)
+}
+
+func (cfg *frozenConfig) Unmarshal(data []byte, v interface{}) error {
+ iter := cfg.BorrowIterator(data)
+ defer cfg.ReturnIterator(iter)
+ iter.ReadVal(v)
+ c := iter.nextToken()
+ if c == 0 {
+ if iter.Error == io.EOF {
+ return nil
+ }
+ return iter.Error
+ }
+ iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
+ return iter.Error
+}
+
+func (cfg *frozenConfig) NewEncoder(writer io.Writer) *Encoder {
+ stream := NewStream(cfg, writer, 512)
+ return &Encoder{stream}
+}
+
+func (cfg *frozenConfig) NewDecoder(reader io.Reader) *Decoder {
+ iter := Parse(cfg, reader, 512)
+ return &Decoder{iter}
+}
+
+func (cfg *frozenConfig) Valid(data []byte) bool {
+ iter := cfg.BorrowIterator(data)
+ defer cfg.ReturnIterator(iter)
+ iter.Skip()
+ return iter.Error == nil
+}
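The intended consumer pattern for this configuration surface is to freeze a `Config` once, at package level, and reuse the resulting `API`; frozen configs cache their encoders and decoders, so freezing per call defeats the cache. A sketch using the exported names above:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// Equivalent to ConfigCompatibleWithStandardLibrary; frozen once and reused.
var json = jsoniter.Config{
	EscapeHTML:             true,
	SortMapKeys:            true,
	ValidateJsonRawMessage: true,
}.Froze()

func main() {
	out, err := json.MarshalToString(map[string]int{"a": 1})
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // {"a":1}
}
```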
diff --git a/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md b/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md
new file mode 100644
index 000000000..3095662b0
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md
@@ -0,0 +1,7 @@
+| json type \ dest type | bool | int | uint | float | string |
+| --- | --- | --- | --- | --- | --- |
+| number | positive => true <br/> negative => true <br/> zero => false | 23.2 => 23 <br/> -32.1 => -32 | 12.1 => 12 <br/> -12.1 => 0 | as normal | same as origin |
+| string | empty string => false <br/> string "0" => false <br/> other strings => true | "123.32" => 123 <br/> "-123.4" => -123 <br/> "123.23xxxw" => 123 <br/> "abcde12" => 0 <br/> "-32.1" => -32 | 13.2 => 13 <br/> -1.1 => 0 | 12.1 => 12.1 <br/> -12.3 => -12.3 <br/> 12.4xxa => 12.4 <br/> +1.1e2 => 110 | same as origin |
+| bool | true => true <br/> false => false | true => 1 <br/> false => 0 | true => 1 <br/> false => 0 | true => 1 <br/> false => 0 | true => "true" <br/> false => "false" |
+| object | true | 0 | 0 | 0 | original json |
+| array | empty array => false <br/> nonempty array => true | [] => 0 <br/> [1,2] => 1 | [] => 0 <br/> [1,2] => 1 | [] => 0 <br/> [1,2] => 1 | original json |
\ No newline at end of file
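The table maps directly onto the Any conversion methods; a sketch, with expected values taken from the table above:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"n": "123.23xxxw", "b": true, "arr": [1, 2]}`)
	fmt.Println(jsoniter.Get(data, "n").ToInt())    // 123  (string -> int)
	fmt.Println(jsoniter.Get(data, "b").ToString()) // true (bool -> string)
	fmt.Println(jsoniter.Get(data, "arr").ToBool()) // true (nonempty array -> bool)
}
```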
diff --git a/vendor/github.com/json-iterator/go/iter.go b/vendor/github.com/json-iterator/go/iter.go
new file mode 100644
index 000000000..95ae54fbf
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter.go
@@ -0,0 +1,322 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+)
+
+// ValueType is the type of a JSON element
+type ValueType int
+
+const (
+ // InvalidValue invalid JSON element
+ InvalidValue ValueType = iota
+ // StringValue JSON element "string"
+ StringValue
+ // NumberValue JSON element 100 or 0.10
+ NumberValue
+ // NilValue JSON element null
+ NilValue
+ // BoolValue JSON element true or false
+ BoolValue
+ // ArrayValue JSON element []
+ ArrayValue
+ // ObjectValue JSON element {}
+ ObjectValue
+)
+
+var hexDigits []byte
+var valueTypes []ValueType
+
+func init() {
+ hexDigits = make([]byte, 256)
+ for i := 0; i < len(hexDigits); i++ {
+ hexDigits[i] = 255
+ }
+ for i := '0'; i <= '9'; i++ {
+ hexDigits[i] = byte(i - '0')
+ }
+ for i := 'a'; i <= 'f'; i++ {
+ hexDigits[i] = byte((i - 'a') + 10)
+ }
+ for i := 'A'; i <= 'F'; i++ {
+ hexDigits[i] = byte((i - 'A') + 10)
+ }
+ valueTypes = make([]ValueType, 256)
+ for i := 0; i < len(valueTypes); i++ {
+ valueTypes[i] = InvalidValue
+ }
+ valueTypes['"'] = StringValue
+ valueTypes['-'] = NumberValue
+ valueTypes['0'] = NumberValue
+ valueTypes['1'] = NumberValue
+ valueTypes['2'] = NumberValue
+ valueTypes['3'] = NumberValue
+ valueTypes['4'] = NumberValue
+ valueTypes['5'] = NumberValue
+ valueTypes['6'] = NumberValue
+ valueTypes['7'] = NumberValue
+ valueTypes['8'] = NumberValue
+ valueTypes['9'] = NumberValue
+ valueTypes['t'] = BoolValue
+ valueTypes['f'] = BoolValue
+ valueTypes['n'] = NilValue
+ valueTypes['['] = ArrayValue
+ valueTypes['{'] = ObjectValue
+}
+
+// Iterator is an io.Reader-like object with JSON-specific read functions.
+// Errors are not returned as return values, but stored in the Error member of this iterator instance.
+type Iterator struct {
+ cfg *frozenConfig
+ reader io.Reader
+ buf []byte
+ head int
+ tail int
+ captureStartedAt int
+ captured []byte
+ Error error
+ Attachment interface{} // open for customized decoder
+}
+
+// NewIterator creates an empty Iterator instance
+func NewIterator(cfg API) *Iterator {
+ return &Iterator{
+ cfg: cfg.(*frozenConfig),
+ reader: nil,
+ buf: nil,
+ head: 0,
+ tail: 0,
+ }
+}
+
+// Parse creates an Iterator instance from io.Reader
+func Parse(cfg API, reader io.Reader, bufSize int) *Iterator {
+ return &Iterator{
+ cfg: cfg.(*frozenConfig),
+ reader: reader,
+ buf: make([]byte, bufSize),
+ head: 0,
+ tail: 0,
+ }
+}
+
+// ParseBytes creates an Iterator instance from byte array
+func ParseBytes(cfg API, input []byte) *Iterator {
+ return &Iterator{
+ cfg: cfg.(*frozenConfig),
+ reader: nil,
+ buf: input,
+ head: 0,
+ tail: len(input),
+ }
+}
+
+// ParseString creates an Iterator instance from string
+func ParseString(cfg API, input string) *Iterator {
+ return ParseBytes(cfg, []byte(input))
+}
+
+// Pool returns a pool that can provide more iterators with the same configuration
+func (iter *Iterator) Pool() IteratorPool {
+ return iter.cfg
+}
+
+// Reset reuses the iterator instance by specifying another reader
+func (iter *Iterator) Reset(reader io.Reader) *Iterator {
+ iter.reader = reader
+ iter.head = 0
+ iter.tail = 0
+ return iter
+}
+
+// ResetBytes reuses the iterator instance by specifying another byte slice as input
+func (iter *Iterator) ResetBytes(input []byte) *Iterator {
+ iter.reader = nil
+ iter.buf = input
+ iter.head = 0
+ iter.tail = len(input)
+ return iter
+}
+
+// WhatIsNext gets the ValueType of the next JSON element without consuming it
+func (iter *Iterator) WhatIsNext() ValueType {
+ valueType := valueTypes[iter.nextToken()]
+ iter.unreadByte()
+ return valueType
+}
+
+func (iter *Iterator) skipWhitespacesWithoutLoadMore() bool {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case ' ', '\n', '\t', '\r':
+ continue
+ }
+ iter.head = i
+ return false
+ }
+ return true
+}
+
+func (iter *Iterator) isObjectEnd() bool {
+ c := iter.nextToken()
+ if c == ',' {
+ return false
+ }
+ if c == '}' {
+ return true
+ }
+ iter.ReportError("isObjectEnd", "object ended prematurely, unexpected char "+string([]byte{c}))
+ return true
+}
+
+func (iter *Iterator) nextToken() byte {
+ // a variation of skip whitespaces, returning the next non-whitespace token
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case ' ', '\n', '\t', '\r':
+ continue
+ }
+ iter.head = i + 1
+ return c
+ }
+ if !iter.loadMore() {
+ return 0
+ }
+ }
+}
+
+// ReportError records an error on the iterator instance, annotated with the current position.
+func (iter *Iterator) ReportError(operation string, msg string) {
+ if iter.Error != nil {
+ if iter.Error != io.EOF {
+ return
+ }
+ }
+ peekStart := iter.head - 10
+ if peekStart < 0 {
+ peekStart = 0
+ }
+ peekEnd := iter.head + 10
+ if peekEnd > iter.tail {
+ peekEnd = iter.tail
+ }
+ parsing := string(iter.buf[peekStart:peekEnd])
+ contextStart := iter.head - 50
+ if contextStart < 0 {
+ contextStart = 0
+ }
+ contextEnd := iter.head + 50
+ if contextEnd > iter.tail {
+ contextEnd = iter.tail
+ }
+ context := string(iter.buf[contextStart:contextEnd])
+ iter.Error = fmt.Errorf("%s: %s, error found in #%v byte of ...|%s|..., bigger context ...|%s|...",
+ operation, msg, iter.head-peekStart, parsing, context)
+}
+
+// CurrentBuffer gets the current buffer as a string for debugging purposes
+func (iter *Iterator) CurrentBuffer() string {
+ peekStart := iter.head - 10
+ if peekStart < 0 {
+ peekStart = 0
+ }
+ return fmt.Sprintf("parsing #%v byte, around ...|%s|..., whole buffer ...|%s|...", iter.head,
+ string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail]))
+}
+
+func (iter *Iterator) readByte() (ret byte) {
+ if iter.head == iter.tail {
+ if iter.loadMore() {
+ ret = iter.buf[iter.head]
+ iter.head++
+ return ret
+ }
+ return 0
+ }
+ ret = iter.buf[iter.head]
+ iter.head++
+ return ret
+}
+
+func (iter *Iterator) loadMore() bool {
+ if iter.reader == nil {
+ if iter.Error == nil {
+ iter.head = iter.tail
+ iter.Error = io.EOF
+ }
+ return false
+ }
+ if iter.captured != nil {
+ iter.captured = append(iter.captured,
+ iter.buf[iter.captureStartedAt:iter.tail]...)
+ iter.captureStartedAt = 0
+ }
+ for {
+ n, err := iter.reader.Read(iter.buf)
+ if n == 0 {
+ if err != nil {
+ if iter.Error == nil {
+ iter.Error = err
+ }
+ return false
+ }
+ } else {
+ iter.head = 0
+ iter.tail = n
+ return true
+ }
+ }
+}
+
+func (iter *Iterator) unreadByte() {
+ if iter.Error != nil {
+ return
+ }
+ iter.head--
+}
+
+// Read reads the next JSON element as a generic interface{}.
+func (iter *Iterator) Read() interface{} {
+ valueType := iter.WhatIsNext()
+ switch valueType {
+ case StringValue:
+ return iter.ReadString()
+ case NumberValue:
+ if iter.cfg.configBeforeFrozen.UseNumber {
+ return json.Number(iter.readNumberAsString())
+ }
+ return iter.ReadFloat64()
+ case NilValue:
+ iter.skipFourBytes('n', 'u', 'l', 'l')
+ return nil
+ case BoolValue:
+ return iter.ReadBool()
+ case ArrayValue:
+ arr := []interface{}{}
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ var elem interface{}
+ iter.ReadVal(&elem)
+ arr = append(arr, elem)
+ return true
+ })
+ return arr
+ case ObjectValue:
+ obj := map[string]interface{}{}
+ iter.ReadMapCB(func(Iter *Iterator, field string) bool {
+ var elem interface{}
+ iter.ReadVal(&elem)
+ obj[field] = elem
+ return true
+ })
+ return obj
+ default:
+ iter.ReportError("Read", fmt.Sprintf("unexpected value type: %v", valueType))
+ return nil
+ }
+}
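A sketch of driving the Iterator directly with `ParseString` and the read methods (ReadArray, ReadString and friends are defined in the sibling files of this diff):

```go
package main

import (
	"fmt"
	"io"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `[1, "two", null, true]`)
	for iter.ReadArray() {
		switch iter.WhatIsNext() {
		case jsoniter.NumberValue:
			fmt.Println("number:", iter.ReadFloat64())
		case jsoniter.StringValue:
			fmt.Println("string:", iter.ReadString())
		default:
			// Read handles null, bool and nested values generically.
			fmt.Println("other:", iter.Read())
		}
	}
	// Per the Iterator contract, errors surface on iter.Error, not as return values.
	if iter.Error != nil && iter.Error != io.EOF {
		fmt.Println("parse error:", iter.Error)
	}
}
```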
diff --git a/vendor/github.com/json-iterator/go/iter_array.go b/vendor/github.com/json-iterator/go/iter_array.go
new file mode 100644
index 000000000..6188cb457
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_array.go
@@ -0,0 +1,58 @@
+package jsoniter
+
+// ReadArray reads an array element and tells whether the array has more elements to read.
+func (iter *Iterator) ReadArray() (ret bool) {
+ c := iter.nextToken()
+ switch c {
+ case 'n':
+ iter.skipThreeBytes('u', 'l', 'l')
+ return false // null
+ case '[':
+ c = iter.nextToken()
+ if c != ']' {
+ iter.unreadByte()
+ return true
+ }
+ return false
+ case ']':
+ return false
+ case ',':
+ return true
+ default:
+ iter.ReportError("ReadArray", "expect [ or , or ] or n, but found "+string([]byte{c}))
+ return
+ }
+}
+
+// ReadArrayCB reads an array with a callback
+func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
+ c := iter.nextToken()
+ if c == '[' {
+ c = iter.nextToken()
+ if c != ']' {
+ iter.unreadByte()
+ if !callback(iter) {
+ return false
+ }
+ c = iter.nextToken()
+ for c == ',' {
+ if !callback(iter) {
+ return false
+ }
+ c = iter.nextToken()
+ }
+ if c != ']' {
+ iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c}))
+ return false
+ }
+ return true
+ }
+ return true
+ }
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return true // null
+ }
+ iter.ReportError("ReadArrayCB", "expect [ or n, but found "+string([]byte{c}))
+ return false
+}
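`ReadArrayCB` is the callback-style counterpart to `ReadArray`; a sketch of summing an array without materializing it:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `[1, 2, 3]`)
	sum := 0
	// The callback runs once per element; returning false stops early.
	iter.ReadArrayCB(func(it *jsoniter.Iterator) bool {
		sum += it.ReadInt()
		return true
	})
	fmt.Println(sum) // 6
}
```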
diff --git a/vendor/github.com/json-iterator/go/iter_float.go b/vendor/github.com/json-iterator/go/iter_float.go
new file mode 100644
index 000000000..4f883c095
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_float.go
@@ -0,0 +1,347 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "io"
+ "math/big"
+ "strconv"
+ "strings"
+ "unsafe"
+)
+
+var floatDigits []int8
+
+const invalidCharForNumber = int8(-1)
+const endOfNumber = int8(-2)
+const dotInNumber = int8(-3)
+
+func init() {
+ floatDigits = make([]int8, 256)
+ for i := 0; i < len(floatDigits); i++ {
+ floatDigits[i] = invalidCharForNumber
+ }
+ for i := int8('0'); i <= int8('9'); i++ {
+ floatDigits[i] = i - int8('0')
+ }
+ floatDigits[','] = endOfNumber
+ floatDigits[']'] = endOfNumber
+ floatDigits['}'] = endOfNumber
+ floatDigits[' '] = endOfNumber
+ floatDigits['\t'] = endOfNumber
+ floatDigits['\n'] = endOfNumber
+ floatDigits['.'] = dotInNumber
+}
+
+// ReadBigFloat reads a big.Float
+func (iter *Iterator) ReadBigFloat() (ret *big.Float) {
+ str := iter.readNumberAsString()
+ if iter.Error != nil && iter.Error != io.EOF {
+ return nil
+ }
+ prec := 64
+ if len(str) > prec {
+ prec = len(str)
+ }
+ val, _, err := big.ParseFloat(str, 10, uint(prec), big.ToZero)
+ if err != nil {
+ iter.Error = err
+ return nil
+ }
+ return val
+}
+
+// ReadBigInt reads a big.Int
+func (iter *Iterator) ReadBigInt() (ret *big.Int) {
+ str := iter.readNumberAsString()
+ if iter.Error != nil && iter.Error != io.EOF {
+ return nil
+ }
+ ret = big.NewInt(0)
+ var success bool
+ ret, success = ret.SetString(str, 10)
+ if !success {
+ iter.ReportError("ReadBigInt", "invalid big int")
+ return nil
+ }
+ return ret
+}
+
+// ReadFloat32 reads a float32
+func (iter *Iterator) ReadFloat32() (ret float32) {
+ c := iter.nextToken()
+ if c == '-' {
+ return -iter.readPositiveFloat32()
+ }
+ iter.unreadByte()
+ return iter.readPositiveFloat32()
+}
+
+func (iter *Iterator) readPositiveFloat32() (ret float32) {
+ value := uint64(0)
+ c := byte(' ')
+ i := iter.head
+ // first char
+ if i == iter.tail {
+ return iter.readFloat32SlowPath()
+ }
+ c = iter.buf[i]
+ i++
+ ind := floatDigits[c]
+ switch ind {
+ case invalidCharForNumber:
+ return iter.readFloat32SlowPath()
+ case endOfNumber:
+ iter.ReportError("readFloat32", "empty number")
+ return
+ case dotInNumber:
+ iter.ReportError("readFloat32", "leading dot is invalid")
+ return
+ case 0:
+ if i == iter.tail {
+ return iter.readFloat32SlowPath()
+ }
+ c = iter.buf[i]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ iter.ReportError("readFloat32", "leading zero is invalid")
+ return
+ }
+ }
+ value = uint64(ind)
+ // chars before dot
+non_decimal_loop:
+ for ; i < iter.tail; i++ {
+ c = iter.buf[i]
+ ind := floatDigits[c]
+ switch ind {
+ case invalidCharForNumber:
+ return iter.readFloat32SlowPath()
+ case endOfNumber:
+ iter.head = i
+ return float32(value)
+ case dotInNumber:
+ break non_decimal_loop
+ }
+ if value > uint64SafeToMultiple10 {
+ return iter.readFloat32SlowPath()
+ }
+ value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
+ }
+ // chars after dot
+ if c == '.' {
+ i++
+ decimalPlaces := 0
+ if i == iter.tail {
+ return iter.readFloat32SlowPath()
+ }
+ for ; i < iter.tail; i++ {
+ c = iter.buf[i]
+ ind := floatDigits[c]
+ switch ind {
+ case endOfNumber:
+ if decimalPlaces > 0 && decimalPlaces < len(pow10) {
+ iter.head = i
+ return float32(float64(value) / float64(pow10[decimalPlaces]))
+ }
+ // too many decimal places
+ return iter.readFloat32SlowPath()
+ case invalidCharForNumber:
+ fallthrough
+ case dotInNumber:
+ return iter.readFloat32SlowPath()
+ }
+ decimalPlaces++
+ if value > uint64SafeToMultiple10 {
+ return iter.readFloat32SlowPath()
+ }
+ value = (value << 3) + (value << 1) + uint64(ind)
+ }
+ }
+ return iter.readFloat32SlowPath()
+}
+
+func (iter *Iterator) readNumberAsString() (ret string) {
+ strBuf := [16]byte{}
+ str := strBuf[0:0]
+load_loop:
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case '+', '-', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ str = append(str, c)
+ continue
+ default:
+ iter.head = i
+ break load_loop
+ }
+ }
+ if !iter.loadMore() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ return
+ }
+ if len(str) == 0 {
+ iter.ReportError("readNumberAsString", "invalid number")
+ }
+ return *(*string)(unsafe.Pointer(&str))
+}
+
+func (iter *Iterator) readFloat32SlowPath() (ret float32) {
+ str := iter.readNumberAsString()
+ if iter.Error != nil && iter.Error != io.EOF {
+ return
+ }
+ errMsg := validateFloat(str)
+ if errMsg != "" {
+ iter.ReportError("readFloat32SlowPath", errMsg)
+ return
+ }
+ val, err := strconv.ParseFloat(str, 32)
+ if err != nil {
+ iter.Error = err
+ return
+ }
+ return float32(val)
+}
+
+// ReadFloat64 reads a float64
+func (iter *Iterator) ReadFloat64() (ret float64) {
+ c := iter.nextToken()
+ if c == '-' {
+ return -iter.readPositiveFloat64()
+ }
+ iter.unreadByte()
+ return iter.readPositiveFloat64()
+}
+
+func (iter *Iterator) readPositiveFloat64() (ret float64) {
+ value := uint64(0)
+ c := byte(' ')
+ i := iter.head
+ // first char
+ if i == iter.tail {
+ return iter.readFloat64SlowPath()
+ }
+ c = iter.buf[i]
+ i++
+ ind := floatDigits[c]
+ switch ind {
+ case invalidCharForNumber:
+ return iter.readFloat64SlowPath()
+ case endOfNumber:
+ iter.ReportError("readFloat64", "empty number")
+ return
+ case dotInNumber:
+ iter.ReportError("readFloat64", "leading dot is invalid")
+ return
+ case 0:
+ if i == iter.tail {
+ return iter.readFloat64SlowPath()
+ }
+ c = iter.buf[i]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ iter.ReportError("readFloat64", "leading zero is invalid")
+ return
+ }
+ }
+ value = uint64(ind)
+ // chars before dot
+non_decimal_loop:
+ for ; i < iter.tail; i++ {
+ c = iter.buf[i]
+ ind := floatDigits[c]
+ switch ind {
+ case invalidCharForNumber:
+ return iter.readFloat64SlowPath()
+ case endOfNumber:
+ iter.head = i
+ return float64(value)
+ case dotInNumber:
+ break non_decimal_loop
+ }
+ if value > uint64SafeToMultiple10 {
+ return iter.readFloat64SlowPath()
+ }
+ value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
+ }
+ // chars after dot
+ if c == '.' {
+ i++
+ decimalPlaces := 0
+ if i == iter.tail {
+ return iter.readFloat64SlowPath()
+ }
+ for ; i < iter.tail; i++ {
+ c = iter.buf[i]
+ ind := floatDigits[c]
+ switch ind {
+ case endOfNumber:
+ if decimalPlaces > 0 && decimalPlaces < len(pow10) {
+ iter.head = i
+ return float64(value) / float64(pow10[decimalPlaces])
+ }
+ // too many decimal places
+ return iter.readFloat64SlowPath()
+ case invalidCharForNumber:
+ fallthrough
+ case dotInNumber:
+ return iter.readFloat64SlowPath()
+ }
+ decimalPlaces++
+ if value > uint64SafeToMultiple10 {
+ return iter.readFloat64SlowPath()
+ }
+ value = (value << 3) + (value << 1) + uint64(ind)
+ }
+ }
+ return iter.readFloat64SlowPath()
+}
+
+func (iter *Iterator) readFloat64SlowPath() (ret float64) {
+ str := iter.readNumberAsString()
+ if iter.Error != nil && iter.Error != io.EOF {
+ return
+ }
+ errMsg := validateFloat(str)
+ if errMsg != "" {
+ iter.ReportError("readFloat64SlowPath", errMsg)
+ return
+ }
+ val, err := strconv.ParseFloat(str, 64)
+ if err != nil {
+ iter.Error = err
+ return
+ }
+ return val
+}
+
+func validateFloat(str string) string {
+	// strconv.ParseFloat does not validate `1.` or `1.e1`
+ if len(str) == 0 {
+ return "empty number"
+ }
+ if str[0] == '-' {
+ return "-- is not valid"
+ }
+ dotPos := strings.IndexByte(str, '.')
+ if dotPos != -1 {
+ if dotPos == len(str)-1 {
+ return "dot can not be last character"
+ }
+ switch str[dotPos+1] {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ default:
+ return "missing digit after dot"
+ }
+ }
+ return ""
+}
+
+// ReadNumber reads a json.Number
+func (iter *Iterator) ReadNumber() (ret json.Number) {
+ return json.Number(iter.readNumberAsString())
+}
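The fast path above only handles values that fit in a uint64 scaled by a small power of ten; everything else falls back to strconv. When float64's 53-bit mantissa is not enough, `ReadBigFloat` parses with at least 64 bits of precision (more for longer literals). A sketch:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	in := `3.14159265358979323846264338327950288`
	f := jsoniter.ParseString(jsoniter.ConfigDefault, in).ReadBigFloat()
	// big.Float keeps more mantissa bits than float64 (>= 64 vs 53).
	fmt.Println(f.Text('f', 25))
}
```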
diff --git a/vendor/github.com/json-iterator/go/iter_int.go b/vendor/github.com/json-iterator/go/iter_int.go
new file mode 100644
index 000000000..214232035
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_int.go
@@ -0,0 +1,345 @@
+package jsoniter
+
+import (
+ "math"
+ "strconv"
+)
+
+var intDigits []int8
+
+const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1
+const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1
+
+func init() {
+ intDigits = make([]int8, 256)
+ for i := 0; i < len(intDigits); i++ {
+ intDigits[i] = invalidCharForNumber
+ }
+ for i := int8('0'); i <= int8('9'); i++ {
+ intDigits[i] = i - int8('0')
+ }
+}
+
+// ReadUint reads a uint
+func (iter *Iterator) ReadUint() uint {
+ if strconv.IntSize == 32 {
+ return uint(iter.ReadUint32())
+ }
+ return uint(iter.ReadUint64())
+}
+
+// ReadInt reads an int
+func (iter *Iterator) ReadInt() int {
+ if strconv.IntSize == 32 {
+ return int(iter.ReadInt32())
+ }
+ return int(iter.ReadInt64())
+}
+
+// ReadInt8 reads an int8
+func (iter *Iterator) ReadInt8() (ret int8) {
+ c := iter.nextToken()
+ if c == '-' {
+ val := iter.readUint32(iter.readByte())
+ if val > math.MaxInt8+1 {
+ iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return -int8(val)
+ }
+ val := iter.readUint32(c)
+ if val > math.MaxInt8 {
+ iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return int8(val)
+}
+
+// ReadUint8 reads a uint8
+func (iter *Iterator) ReadUint8() (ret uint8) {
+ val := iter.readUint32(iter.nextToken())
+ if val > math.MaxUint8 {
+ iter.ReportError("ReadUint8", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return uint8(val)
+}
+
+// ReadInt16 reads an int16
+func (iter *Iterator) ReadInt16() (ret int16) {
+ c := iter.nextToken()
+ if c == '-' {
+ val := iter.readUint32(iter.readByte())
+ if val > math.MaxInt16+1 {
+ iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return -int16(val)
+ }
+ val := iter.readUint32(c)
+ if val > math.MaxInt16 {
+ iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return int16(val)
+}
+
+// ReadUint16 reads a uint16
+func (iter *Iterator) ReadUint16() (ret uint16) {
+ val := iter.readUint32(iter.nextToken())
+ if val > math.MaxUint16 {
+ iter.ReportError("ReadUint16", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return uint16(val)
+}
+
+// ReadInt32 reads an int32
+func (iter *Iterator) ReadInt32() (ret int32) {
+ c := iter.nextToken()
+ if c == '-' {
+ val := iter.readUint32(iter.readByte())
+ if val > math.MaxInt32+1 {
+ iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return -int32(val)
+ }
+ val := iter.readUint32(c)
+ if val > math.MaxInt32 {
+ iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return int32(val)
+}
+
+// ReadUint32 reads a uint32
+func (iter *Iterator) ReadUint32() (ret uint32) {
+ return iter.readUint32(iter.nextToken())
+}
+
+func (iter *Iterator) readUint32(c byte) (ret uint32) {
+ ind := intDigits[c]
+ if ind == 0 {
+ iter.assertInteger()
+ return 0 // single zero
+ }
+ if ind == invalidCharForNumber {
+ iter.ReportError("readUint32", "unexpected character: "+string([]byte{byte(ind)}))
+ return
+ }
+ value := uint32(ind)
+ if iter.tail-iter.head > 10 {
+ i := iter.head
+ ind2 := intDigits[iter.buf[i]]
+ if ind2 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value
+ }
+ i++
+ ind3 := intDigits[iter.buf[i]]
+ if ind3 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*10 + uint32(ind2)
+ }
+ //iter.head = i + 1
+ //value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
+ i++
+ ind4 := intDigits[iter.buf[i]]
+ if ind4 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*100 + uint32(ind2)*10 + uint32(ind3)
+ }
+ i++
+ ind5 := intDigits[iter.buf[i]]
+ if ind5 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4)
+ }
+ i++
+ ind6 := intDigits[iter.buf[i]]
+ if ind6 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5)
+ }
+ i++
+ ind7 := intDigits[iter.buf[i]]
+ if ind7 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6)
+ }
+ i++
+ ind8 := intDigits[iter.buf[i]]
+ if ind8 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7)
+ }
+ i++
+ ind9 := intDigits[iter.buf[i]]
+ value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8)
+ iter.head = i
+ if ind9 == invalidCharForNumber {
+ iter.assertInteger()
+ return value
+ }
+ }
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ ind = intDigits[iter.buf[i]]
+ if ind == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value
+ }
+ if value > uint32SafeToMultiply10 {
+ value2 := (value << 3) + (value << 1) + uint32(ind)
+ if value2 < value {
+ iter.ReportError("readUint32", "overflow")
+ return
+ }
+ value = value2
+ continue
+ }
+ value = (value << 3) + (value << 1) + uint32(ind)
+ }
+ if !iter.loadMore() {
+ iter.assertInteger()
+ return value
+ }
+ }
+}
+
+// ReadInt64 reads an int64
+func (iter *Iterator) ReadInt64() (ret int64) {
+ c := iter.nextToken()
+ if c == '-' {
+ val := iter.readUint64(iter.readByte())
+ if val > math.MaxInt64+1 {
+ iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
+ return
+ }
+ return -int64(val)
+ }
+ val := iter.readUint64(c)
+ if val > math.MaxInt64 {
+ iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
+ return
+ }
+ return int64(val)
+}
+
+// ReadUint64 reads a uint64
+func (iter *Iterator) ReadUint64() uint64 {
+ return iter.readUint64(iter.nextToken())
+}
+
+func (iter *Iterator) readUint64(c byte) (ret uint64) {
+ ind := intDigits[c]
+ if ind == 0 {
+ iter.assertInteger()
+ return 0 // single zero
+ }
+ if ind == invalidCharForNumber {
+ iter.ReportError("readUint64", "unexpected character: "+string([]byte{byte(ind)}))
+ return
+ }
+ value := uint64(ind)
+ if iter.tail-iter.head > 10 {
+ i := iter.head
+ ind2 := intDigits[iter.buf[i]]
+ if ind2 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value
+ }
+ i++
+ ind3 := intDigits[iter.buf[i]]
+ if ind3 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*10 + uint64(ind2)
+ }
+ //iter.head = i + 1
+ //value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
+ i++
+ ind4 := intDigits[iter.buf[i]]
+ if ind4 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*100 + uint64(ind2)*10 + uint64(ind3)
+ }
+ i++
+ ind5 := intDigits[iter.buf[i]]
+ if ind5 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*1000 + uint64(ind2)*100 + uint64(ind3)*10 + uint64(ind4)
+ }
+ i++
+ ind6 := intDigits[iter.buf[i]]
+ if ind6 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*10000 + uint64(ind2)*1000 + uint64(ind3)*100 + uint64(ind4)*10 + uint64(ind5)
+ }
+ i++
+ ind7 := intDigits[iter.buf[i]]
+ if ind7 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*100000 + uint64(ind2)*10000 + uint64(ind3)*1000 + uint64(ind4)*100 + uint64(ind5)*10 + uint64(ind6)
+ }
+ i++
+ ind8 := intDigits[iter.buf[i]]
+ if ind8 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*1000000 + uint64(ind2)*100000 + uint64(ind3)*10000 + uint64(ind4)*1000 + uint64(ind5)*100 + uint64(ind6)*10 + uint64(ind7)
+ }
+ i++
+ ind9 := intDigits[iter.buf[i]]
+ value = value*10000000 + uint64(ind2)*1000000 + uint64(ind3)*100000 + uint64(ind4)*10000 + uint64(ind5)*1000 + uint64(ind6)*100 + uint64(ind7)*10 + uint64(ind8)
+ iter.head = i
+ if ind9 == invalidCharForNumber {
+ iter.assertInteger()
+ return value
+ }
+ }
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ ind = intDigits[iter.buf[i]]
+ if ind == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value
+ }
+ if value > uint64SafeToMultiple10 {
+ value2 := (value << 3) + (value << 1) + uint64(ind)
+ if value2 < value {
+ iter.ReportError("readUint64", "overflow")
+ return
+ }
+ value = value2
+ continue
+ }
+ value = (value << 3) + (value << 1) + uint64(ind)
+ }
+ if !iter.loadMore() {
+ iter.assertInteger()
+ return value
+ }
+ }
+}
+
+func (iter *Iterator) assertInteger() {
+ if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' {
+ iter.ReportError("assertInteger", "can not decode float as int")
+ }
+}
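Out-of-range values are reported through `iter.Error` rather than wrapped; a sketch of the bounds checks above:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `300`)
	v := iter.ReadInt8() // math.MaxInt8 is 127
	fmt.Println(v, iter.Error != nil) // 0 true: overflow reported, zero value returned
}
```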
diff --git a/vendor/github.com/json-iterator/go/iter_object.go b/vendor/github.com/json-iterator/go/iter_object.go
new file mode 100644
index 000000000..1c5757671
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_object.go
@@ -0,0 +1,251 @@
+package jsoniter
+
+import (
+ "fmt"
+ "strings"
+)
+
+// ReadObject reads one field from the object.
+// If the object has ended, it returns an empty string.
+// Otherwise, it returns the field name.
+func (iter *Iterator) ReadObject() (ret string) {
+ c := iter.nextToken()
+ switch c {
+ case 'n':
+ iter.skipThreeBytes('u', 'l', 'l')
+ return "" // null
+ case '{':
+ c = iter.nextToken()
+ if c == '"' {
+ iter.unreadByte()
+ field := iter.ReadString()
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ return field
+ }
+ if c == '}' {
+ return "" // end of object
+ }
+ iter.ReportError("ReadObject", `expect " after {, but found `+string([]byte{c}))
+ return
+ case ',':
+ field := iter.ReadString()
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ return field
+ case '}':
+ return "" // end of object
+ default:
+ iter.ReportError("ReadObject", fmt.Sprintf(`expect { or , or } or n, but found %s`, string([]byte{c})))
+ return
+ }
+}
+
+// readFieldHash computes an FNV-1a hash of the next object field name,
+// lowercasing ASCII letters unless the config is case sensitive.
+func (iter *Iterator) readFieldHash() int64 {
+ hash := int64(0x811c9dc5)
+ c := iter.nextToken()
+ if c != '"' {
+ iter.ReportError("readFieldHash", `expect ", but found `+string([]byte{c}))
+ return 0
+ }
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ // require ascii string and no escape
+ b := iter.buf[i]
+ if b == '\\' {
+ iter.head = i
+ for _, b := range iter.readStringSlowPath() {
+ if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
+ b += 'a' - 'A'
+ }
+ hash ^= int64(b)
+ hash *= 0x1000193
+ }
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
+ return 0
+ }
+ return hash
+ }
+ if b == '"' {
+ iter.head = i + 1
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
+ return 0
+ }
+ return hash
+ }
+ if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
+ b += 'a' - 'A'
+ }
+ hash ^= int64(b)
+ hash *= 0x1000193
+ }
+ if !iter.loadMore() {
+ iter.ReportError("readFieldHash", `incomplete field name`)
+ return 0
+ }
+ }
+}
+
+func calcHash(str string, caseSensitive bool) int64 {
+ if !caseSensitive {
+ str = strings.ToLower(str)
+ }
+ hash := int64(0x811c9dc5)
+ for _, b := range []byte(str) {
+ hash ^= int64(b)
+ hash *= 0x1000193
+ }
+	return hash
+}
+
+// ReadObjectCB reads an object with a callback; the key is ASCII-only and the field name is not copied
+func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
+ c := iter.nextToken()
+ var field string
+ if c == '{' {
+ c = iter.nextToken()
+ if c == '"' {
+ iter.unreadByte()
+ field = iter.ReadString()
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ if !callback(iter, field) {
+ return false
+ }
+ c = iter.nextToken()
+ for c == ',' {
+ field = iter.ReadString()
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ if !callback(iter, field) {
+ return false
+ }
+ c = iter.nextToken()
+ }
+ if c != '}' {
+ iter.ReportError("ReadObjectCB", `object not ended with }`)
+ return false
+ }
+ return true
+ }
+ if c == '}' {
+ return true
+ }
+ iter.ReportError("ReadObjectCB", `expect " after }, but found `+string([]byte{c}))
+ return false
+ }
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return true // null
+ }
+ iter.ReportError("ReadObjectCB", `expect { or n, but found `+string([]byte{c}))
+ return false
+}
+
+// ReadMapCB reads a map with a callback; the key can be any string
+func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
+ c := iter.nextToken()
+ if c == '{' {
+ c = iter.nextToken()
+ if c == '"' {
+ iter.unreadByte()
+ field := iter.ReadString()
+			if c = iter.nextToken(); c != ':' {
+ iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+ return false
+ }
+ if !callback(iter, field) {
+ return false
+ }
+ c = iter.nextToken()
+ for c == ',' {
+ field = iter.ReadString()
+				if c = iter.nextToken(); c != ':' {
+ iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+ return false
+ }
+ if !callback(iter, field) {
+ return false
+ }
+ c = iter.nextToken()
+ }
+ if c != '}' {
+ iter.ReportError("ReadMapCB", `object not ended with }`)
+ return false
+ }
+ return true
+ }
+ if c == '}' {
+ return true
+ }
+ iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
+ return false
+ }
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return true // null
+ }
+ iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c}))
+ return false
+}
+
+func (iter *Iterator) readObjectStart() bool {
+ c := iter.nextToken()
+ if c == '{' {
+ c = iter.nextToken()
+ if c == '}' {
+ return false
+ }
+ iter.unreadByte()
+ return true
+ } else if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return false
+ }
+ iter.ReportError("readObjectStart", "expect { or n, but found "+string([]byte{c}))
+ return false
+}
+
+func (iter *Iterator) readObjectFieldAsBytes() (ret []byte) {
+ str := iter.ReadStringAsSlice()
+ if iter.skipWhitespacesWithoutLoadMore() {
+ if ret == nil {
+ ret = make([]byte, len(str))
+ copy(ret, str)
+ }
+ if !iter.loadMore() {
+ return
+ }
+ }
+ if iter.buf[iter.head] != ':' {
+ iter.ReportError("readObjectFieldAsBytes", "expect : after object field, but found "+string([]byte{iter.buf[iter.head]}))
+ return
+ }
+ iter.head++
+ if iter.skipWhitespacesWithoutLoadMore() {
+ if ret == nil {
+ ret = make([]byte, len(str))
+ copy(ret, str)
+ }
+ if !iter.loadMore() {
+ return
+ }
+ }
+ if ret == nil {
+ return str
+ }
+ return ret
+}
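`ReadObjectCB` and `ReadMapCB` differ only in key handling (the ASCII, non-copying fast path versus arbitrary strings); a sketch of streaming over object fields with `ReadMapCB`:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"a": 1, "b": 2}`)
	iter.ReadMapCB(func(it *jsoniter.Iterator, field string) bool {
		fmt.Println(field, "=", it.ReadInt())
		return true // returning false stops the iteration early
	})
}
```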
diff --git a/vendor/github.com/json-iterator/go/iter_skip.go b/vendor/github.com/json-iterator/go/iter_skip.go
new file mode 100644
index 000000000..f58beb913
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_skip.go
@@ -0,0 +1,129 @@
+package jsoniter
+
+import "fmt"
+
+// ReadNil reads a JSON null and
+// reports whether a null was actually read
+func (iter *Iterator) ReadNil() (ret bool) {
+ c := iter.nextToken()
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l') // null
+ return true
+ }
+ iter.unreadByte()
+ return false
+}
+
+// ReadBool reads a JSON boolean value
+func (iter *Iterator) ReadBool() (ret bool) {
+ c := iter.nextToken()
+ if c == 't' {
+ iter.skipThreeBytes('r', 'u', 'e')
+ return true
+ }
+ if c == 'f' {
+ iter.skipFourBytes('a', 'l', 's', 'e')
+ return false
+ }
+ iter.ReportError("ReadBool", "expect t or f, but found "+string([]byte{c}))
+ return
+}
+
+// SkipAndReturnBytes skips the next JSON element and returns its content as []byte.
+// The []byte can be kept; it is a copy of the data.
+func (iter *Iterator) SkipAndReturnBytes() []byte {
+ iter.startCapture(iter.head)
+ iter.Skip()
+ return iter.stopCapture()
+}
+
+type captureBuffer struct {
+ startedAt int
+ captured []byte
+}
+
+func (iter *Iterator) startCapture(captureStartedAt int) {
+ if iter.captured != nil {
+ panic("already in capture mode")
+ }
+ iter.captureStartedAt = captureStartedAt
+ iter.captured = make([]byte, 0, 32)
+}
+
+func (iter *Iterator) stopCapture() []byte {
+ if iter.captured == nil {
+ panic("not in capture mode")
+ }
+ captured := iter.captured
+ remaining := iter.buf[iter.captureStartedAt:iter.head]
+ iter.captureStartedAt = -1
+ iter.captured = nil
+ if len(captured) == 0 {
+ copied := make([]byte, len(remaining))
+ copy(copied, remaining)
+ return copied
+ }
+ captured = append(captured, remaining...)
+ return captured
+}
+
+// Skip skips a JSON element and positions the iterator at the next element
+func (iter *Iterator) Skip() {
+ c := iter.nextToken()
+ switch c {
+ case '"':
+ iter.skipString()
+ case 'n':
+ iter.skipThreeBytes('u', 'l', 'l') // null
+ case 't':
+ iter.skipThreeBytes('r', 'u', 'e') // true
+ case 'f':
+ iter.skipFourBytes('a', 'l', 's', 'e') // false
+ case '0':
+ iter.unreadByte()
+ iter.ReadFloat32()
+ case '-', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ iter.skipNumber()
+ case '[':
+ iter.skipArray()
+ case '{':
+ iter.skipObject()
+ default:
+ iter.ReportError("Skip", fmt.Sprintf("do not know how to skip: %v", c))
+ return
+ }
+}
+
+func (iter *Iterator) skipFourBytes(b1, b2, b3, b4 byte) {
+ if iter.readByte() != b1 {
+ iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
+ return
+ }
+ if iter.readByte() != b2 {
+ iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
+ return
+ }
+ if iter.readByte() != b3 {
+ iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
+ return
+ }
+ if iter.readByte() != b4 {
+ iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
+ return
+ }
+}
+
+func (iter *Iterator) skipThreeBytes(b1, b2, b3 byte) {
+ if iter.readByte() != b1 {
+ iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
+ return
+ }
+ if iter.readByte() != b2 {
+ iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
+ return
+ }
+ if iter.readByte() != b3 {
+ iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
+ return
+ }
+}
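`Skip` and `SkipAndReturnBytes` make it cheap to pull one field out of a large document while ignoring the rest; a sketch:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	input := `{"meta": {"big": [1, 2, 3]}, "id": 42}`
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, input)
	for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
		if field == "id" {
			fmt.Println("id:", iter.ReadInt())
			continue
		}
		raw := iter.SkipAndReturnBytes() // a copy, safe to retain
		fmt.Println(field, "skipped:", string(raw))
	}
}
```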
diff --git a/vendor/github.com/json-iterator/go/iter_skip_sloppy.go b/vendor/github.com/json-iterator/go/iter_skip_sloppy.go
new file mode 100644
index 000000000..8fcdc3b69
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_skip_sloppy.go
@@ -0,0 +1,144 @@
+//+build jsoniter_sloppy
+
+package jsoniter
+
+// sloppy but faster implementation that does not validate the input JSON
+
+func (iter *Iterator) skipNumber() {
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case ' ', '\n', '\r', '\t', ',', '}', ']':
+ iter.head = i
+ return
+ }
+ }
+ if !iter.loadMore() {
+ return
+ }
+ }
+}
+
+func (iter *Iterator) skipArray() {
+ level := 1
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ switch iter.buf[i] {
+ case '"': // If inside string, skip it
+ iter.head = i + 1
+ iter.skipString()
+ i = iter.head - 1 // it will be i++ soon
+ case '[': // If open symbol, increase level
+ level++
+			case ']': // If close symbol, decrease level
+ level--
+
+ // If we have returned to the original level, we're done
+ if level == 0 {
+ iter.head = i + 1
+ return
+ }
+ }
+ }
+ if !iter.loadMore() {
+ iter.ReportError("skipObject", "incomplete array")
+ return
+ }
+ }
+}
+
+func (iter *Iterator) skipObject() {
+ level := 1
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ switch iter.buf[i] {
+ case '"': // If inside string, skip it
+ iter.head = i + 1
+ iter.skipString()
+ i = iter.head - 1 // it will be i++ soon
+ case '{': // If open symbol, increase level
+ level++
+			case '}': // If close symbol, decrease level
+ level--
+
+ // If we have returned to the original level, we're done
+ if level == 0 {
+ iter.head = i + 1
+ return
+ }
+ }
+ }
+ if !iter.loadMore() {
+ iter.ReportError("skipObject", "incomplete object")
+ return
+ }
+ }
+}
+
+func (iter *Iterator) skipString() {
+ for {
+ end, escaped := iter.findStringEnd()
+ if end == -1 {
+ if !iter.loadMore() {
+ iter.ReportError("skipString", "incomplete string")
+ return
+ }
+ if escaped {
+ iter.head = 1 // skip the first char as last char read is \
+ }
+ } else {
+ iter.head = end
+ return
+ }
+ }
+}
+
+// Adapted from: https://github.com/buger/jsonparser/blob/master/parser.go
+// Tries to find the end of a string.
+// Supports strings that contain escaped quote symbols.
+func (iter *Iterator) findStringEnd() (int, bool) {
+ escaped := false
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ if c == '"' {
+ if !escaped {
+ return i + 1, false
+ }
+ j := i - 1
+ for {
+ if j < iter.head || iter.buf[j] != '\\' {
+ // even number of backslashes
+ // either end of buffer, or " found
+ return i + 1, true
+ }
+ j--
+ if j < iter.head || iter.buf[j] != '\\' {
+ // odd number of backslashes
+ // it is \" or \\\"
+ break
+ }
+ j--
+ }
+ } else if c == '\\' {
+ escaped = true
+ }
+ }
+ j := iter.tail - 1
+ for {
+ if j < iter.head || iter.buf[j] != '\\' {
+ // even number of backslashes
+ // either end of buffer, or " found
+ return -1, false // do not end with \
+ }
+ j--
+ if j < iter.head || iter.buf[j] != '\\' {
+ // odd number of backslashes
+ // it is \" or \\\"
+ break
+ }
+ j--
+
+ }
+ return -1, true // end with \
+}
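+
+// For example, scanning `a\\\"b"`: the first quote is preceded by three
+// backslashes (an odd count), so it is an escaped quote and the scan
+// continues; the final quote is unescaped and ends the string.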
diff --git a/vendor/github.com/json-iterator/go/iter_skip_strict.go b/vendor/github.com/json-iterator/go/iter_skip_strict.go
new file mode 100644
index 000000000..f67bc2e83
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_skip_strict.go
@@ -0,0 +1,89 @@
+//+build !jsoniter_sloppy
+
+package jsoniter
+
+import "fmt"
+
+func (iter *Iterator) skipNumber() {
+ if !iter.trySkipNumber() {
+ iter.unreadByte()
+ iter.ReadFloat32()
+ }
+}
+
+func (iter *Iterator) trySkipNumber() bool {
+ dotFound := false
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ case '.':
+ if dotFound {
+ iter.ReportError("validateNumber", `more than one dot found in number`)
+ return true // already failed
+ }
+ if i+1 == iter.tail {
+ return false
+ }
+ c = iter.buf[i+1]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ default:
+ iter.ReportError("validateNumber", `missing digit after dot`)
+ return true // already failed
+ }
+ dotFound = true
+ default:
+ switch c {
+ case ',', ']', '}', ' ', '\t', '\n', '\r':
+ if iter.head == i {
+ return false // if - without following digits
+ }
+ iter.head = i
+ return true // must be valid
+ }
+ return false // may be invalid
+ }
+ }
+ return false
+}
+
+func (iter *Iterator) skipString() {
+ if !iter.trySkipString() {
+ iter.unreadByte()
+ iter.ReadString()
+ }
+}
+
+func (iter *Iterator) trySkipString() bool {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ if c == '"' {
+ iter.head = i + 1
+ return true // valid
+ } else if c == '\\' {
+ return false
+ } else if c < ' ' {
+ iter.ReportError("trySkipString",
+ fmt.Sprintf(`invalid control character found: %d`, c))
+ return true // already failed
+ }
+ }
+ return false
+}
+
+func (iter *Iterator) skipObject() {
+ iter.unreadByte()
+ iter.ReadObjectCB(func(iter *Iterator, field string) bool {
+ iter.Skip()
+ return true
+ })
+}
+
+func (iter *Iterator) skipArray() {
+ iter.unreadByte()
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ iter.Skip()
+ return true
+ })
+}
diff --git a/vendor/github.com/json-iterator/go/iter_str.go b/vendor/github.com/json-iterator/go/iter_str.go
new file mode 100644
index 000000000..adc487ea8
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_str.go
@@ -0,0 +1,215 @@
+package jsoniter
+
+import (
+ "fmt"
+ "unicode/utf16"
+)
+
+// ReadString reads a string from the iterator.
+func (iter *Iterator) ReadString() (ret string) {
+ c := iter.nextToken()
+ if c == '"' {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ if c == '"' {
+ ret = string(iter.buf[iter.head:i])
+ iter.head = i + 1
+ return ret
+ } else if c == '\\' {
+ break
+ } else if c < ' ' {
+ iter.ReportError("ReadString",
+ fmt.Sprintf(`invalid control character found: %d`, c))
+ return
+ }
+ }
+ return iter.readStringSlowPath()
+ } else if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return ""
+ }
+ iter.ReportError("ReadString", `expects " or n, but found `+string([]byte{c}))
+ return
+}
+
+func (iter *Iterator) readStringSlowPath() (ret string) {
+ var str []byte
+ var c byte
+ for iter.Error == nil {
+ c = iter.readByte()
+ if c == '"' {
+ return string(str)
+ }
+ if c == '\\' {
+ c = iter.readByte()
+ str = iter.readEscapedChar(c, str)
+ } else {
+ str = append(str, c)
+ }
+ }
+ iter.ReportError("readStringSlowPath", "unexpected end of input")
+ return
+}
+
+func (iter *Iterator) readEscapedChar(c byte, str []byte) []byte {
+ switch c {
+ case 'u':
+ r := iter.readU4()
+ if utf16.IsSurrogate(r) {
+ c = iter.readByte()
+ if iter.Error != nil {
+ return nil
+ }
+ if c != '\\' {
+ iter.unreadByte()
+ str = appendRune(str, r)
+ return str
+ }
+ c = iter.readByte()
+ if iter.Error != nil {
+ return nil
+ }
+ if c != 'u' {
+ str = appendRune(str, r)
+ return iter.readEscapedChar(c, str)
+ }
+ r2 := iter.readU4()
+ if iter.Error != nil {
+ return nil
+ }
+ combined := utf16.DecodeRune(r, r2)
+ if combined == '\uFFFD' {
+ str = appendRune(str, r)
+ str = appendRune(str, r2)
+ } else {
+ str = appendRune(str, combined)
+ }
+ } else {
+ str = appendRune(str, r)
+ }
+ case '"':
+ str = append(str, '"')
+ case '\\':
+ str = append(str, '\\')
+ case '/':
+ str = append(str, '/')
+ case 'b':
+ str = append(str, '\b')
+ case 'f':
+ str = append(str, '\f')
+ case 'n':
+ str = append(str, '\n')
+ case 'r':
+ str = append(str, '\r')
+ case 't':
+ str = append(str, '\t')
+ default:
+ iter.ReportError("readEscapedChar",
+ `invalid escape char after \`)
+ return nil
+ }
+ return str
+}
+
+// ReadStringAsSlice reads a string from the iterator without copying it into string form.
+// The returned []byte must not be retained, as it will be overwritten by the next iterator call.
+func (iter *Iterator) ReadStringAsSlice() (ret []byte) {
+ c := iter.nextToken()
+ if c == '"' {
+ for i := iter.head; i < iter.tail; i++ {
+ // require ascii string and no escape
+ // for: field name, base64, number
+ if iter.buf[i] == '"' {
+ // fast path: reuse the underlying buffer
+ ret = iter.buf[iter.head:i]
+ iter.head = i + 1
+ return ret
+ }
+ }
+ readLen := iter.tail - iter.head
+ copied := make([]byte, readLen, readLen*2)
+ copy(copied, iter.buf[iter.head:iter.tail])
+ iter.head = iter.tail
+ for iter.Error == nil {
+ c := iter.readByte()
+ if c == '"' {
+ return copied
+ }
+ copied = append(copied, c)
+ }
+ return copied
+ }
+ iter.ReportError("ReadStringAsSlice", `expects " or n, but found `+string([]byte{c}))
+ return
+}
+
+func (iter *Iterator) readU4() (ret rune) {
+ for i := 0; i < 4; i++ {
+ c := iter.readByte()
+ if iter.Error != nil {
+ return
+ }
+ if c >= '0' && c <= '9' {
+ ret = ret*16 + rune(c-'0')
+ } else if c >= 'a' && c <= 'f' {
+ ret = ret*16 + rune(c-'a'+10)
+ } else if c >= 'A' && c <= 'F' {
+ ret = ret*16 + rune(c-'A'+10)
+ } else {
+ iter.ReportError("readU4", "expects 0~9 or a~f, but found "+string([]byte{c}))
+ return
+ }
+ }
+ return ret
+}
+
+const (
+ t1 = 0x00 // 0000 0000
+ tx = 0x80 // 1000 0000
+ t2 = 0xC0 // 1100 0000
+ t3 = 0xE0 // 1110 0000
+ t4 = 0xF0 // 1111 0000
+ t5 = 0xF8 // 1111 1000
+
+ maskx = 0x3F // 0011 1111
+ mask2 = 0x1F // 0001 1111
+ mask3 = 0x0F // 0000 1111
+ mask4 = 0x07 // 0000 0111
+
+ rune1Max = 1<<7 - 1
+ rune2Max = 1<<11 - 1
+ rune3Max = 1<<16 - 1
+
+ surrogateMin = 0xD800
+ surrogateMax = 0xDFFF
+
+ maxRune = '\U0010FFFF' // Maximum valid Unicode code point.
+ runeError = '\uFFFD' // the "error" Rune or "Unicode replacement character"
+)
+
+func appendRune(p []byte, r rune) []byte {
+ // Negative values are erroneous. Making it unsigned addresses the problem.
+ switch i := uint32(r); {
+ case i <= rune1Max:
+ p = append(p, byte(r))
+ return p
+ case i <= rune2Max:
+ p = append(p, t2|byte(r>>6))
+ p = append(p, tx|byte(r)&maskx)
+ return p
+ case i > maxRune, surrogateMin <= i && i <= surrogateMax:
+ r = runeError
+ fallthrough
+ case i <= rune3Max:
+ p = append(p, t3|byte(r>>12))
+ p = append(p, tx|byte(r>>6)&maskx)
+ p = append(p, tx|byte(r)&maskx)
+ return p
+ default:
+ p = append(p, t4|byte(r>>18))
+ p = append(p, tx|byte(r>>12)&maskx)
+ p = append(p, tx|byte(r>>6)&maskx)
+ p = append(p, tx|byte(r)&maskx)
+ return p
+ }
+}
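+
+// For example, appendRune(nil, 'é') (U+00E9, in the two-byte range) yields
+// the UTF-8 bytes 0xC3 0xA9; surrogate code points and values above maxRune
+// are replaced by runeError before encoding.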
diff --git a/vendor/github.com/json-iterator/go/jsoniter.go b/vendor/github.com/json-iterator/go/jsoniter.go
new file mode 100644
index 000000000..c2934f916
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter.go
@@ -0,0 +1,18 @@
+// Package jsoniter implements encoding and decoding of JSON as defined in
+// RFC 4627 and provides interfaces with syntax identical to the standard
+// library's encoding/json. Converting from encoding/json to jsoniter requires
+// no more than replacing the imported package (and variable type
+// declarations, if any). jsoniter's interfaces give 100% compatibility with
+// code using the standard library.
+//
+// "JSON and Go"
+// (https://golang.org/doc/articles/json_and_go.html)
+// gives a description of how Marshal/Unmarshal operate
+// between arbitrary or predefined json objects and bytes,
+// and it applies to jsoniter.Marshal/Unmarshal as well.
+//
+// In addition, jsoniter.Iterator provides a different set of interfaces
+// for iterating over given bytes/string/reader
+// and yielding parsed elements one by one.
+// This set of interfaces reads input only as required and gives
+// better performance.
+package jsoniter
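+
+// A minimal usage sketch (the Marshal and Unmarshal adapters are declared
+// elsewhere in this package, mirroring encoding/json):
+//
+//    type point struct{ X, Y int }
+//    out, _ := Marshal(point{1, 2}) // `{"X":1,"Y":2}`
+//    var p point
+//    _ = Unmarshal(out, &p)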
diff --git a/vendor/github.com/json-iterator/go/pool.go b/vendor/github.com/json-iterator/go/pool.go
new file mode 100644
index 000000000..e2389b56c
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/pool.go
@@ -0,0 +1,42 @@
+package jsoniter
+
+import (
+ "io"
+)
+
+// IteratorPool is a thread-safe pool of iterators sharing the same configuration.
+type IteratorPool interface {
+ BorrowIterator(data []byte) *Iterator
+ ReturnIterator(iter *Iterator)
+}
+
+// StreamPool is a thread-safe pool of streams sharing the same configuration.
+type StreamPool interface {
+ BorrowStream(writer io.Writer) *Stream
+ ReturnStream(stream *Stream)
+}
+
+func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream {
+ stream := cfg.streamPool.Get().(*Stream)
+ stream.Reset(writer)
+ return stream
+}
+
+func (cfg *frozenConfig) ReturnStream(stream *Stream) {
+ stream.out = nil
+ stream.Error = nil
+ stream.Attachment = nil
+ cfg.streamPool.Put(stream)
+}
+
+func (cfg *frozenConfig) BorrowIterator(data []byte) *Iterator {
+ iter := cfg.iteratorPool.Get().(*Iterator)
+ iter.ResetBytes(data)
+ return iter
+}
+
+func (cfg *frozenConfig) ReturnIterator(iter *Iterator) {
+ iter.Error = nil
+ iter.Attachment = nil
+ cfg.iteratorPool.Put(iter)
+}
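+
+// Usage sketch (ConfigDefault, declared in this package's config file,
+// satisfies both pool interfaces; os.Stdout is just an example writer):
+//
+//    stream := ConfigDefault.BorrowStream(os.Stdout)
+//    defer ConfigDefault.ReturnStream(stream)
+//    stream.WriteVal(map[string]int{"a": 1})
+//    stream.Flush()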
diff --git a/vendor/github.com/json-iterator/go/reflect.go b/vendor/github.com/json-iterator/go/reflect.go
new file mode 100644
index 000000000..4459e203f
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect.go
@@ -0,0 +1,332 @@
+package jsoniter
+
+import (
+ "fmt"
+ "reflect"
+ "unsafe"
+
+ "github.com/modern-go/reflect2"
+)
+
+// ValDecoder is an internal type registered to the cache as needed.
+// Don't confuse jsoniter.ValDecoder with json.Decoder.
+// For json.Decoder's adapter, refer to jsoniter.AdapterDecoder(todo link).
+//
+// Reflection on the type is used to create decoders, which are then cached.
+// Reflection on values is avoided where possible, since reflect.Value itself allocates, with the following exceptions:
+// 1. creating an instance of a new value, for example *int needs an int to be allocated
+// 2. appending to a slice, if the existing capacity is not enough, allocation will be done using reflect.New
+// 3. assigning to a map, where both key and value will be reflect.Value
+// For a simple struct binding, decoding is reflect.Value-free and allocation-free.
+type ValDecoder interface {
+ Decode(ptr unsafe.Pointer, iter *Iterator)
+}
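+
+// Illustrative sketch of a custom ValDecoder (hypothetical; assumes the
+// standard strings package) that upper-cases decoded strings:
+//
+//    type upperDecoder struct{}
+//
+//    func (d *upperDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+//        *(*string)(ptr) = strings.ToUpper(iter.ReadString())
+//    }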
+
+// ValEncoder is an internal type registered to the cache as needed.
+// Don't confuse jsoniter.ValEncoder with json.Encoder.
+// For json.Encoder's adapter, refer to jsoniter.AdapterEncoder(todo godoc link).
+type ValEncoder interface {
+ IsEmpty(ptr unsafe.Pointer) bool
+ Encode(ptr unsafe.Pointer, stream *Stream)
+}
+
+type checkIsEmpty interface {
+ IsEmpty(ptr unsafe.Pointer) bool
+}
+
+type ctx struct {
+ *frozenConfig
+ prefix string
+ encoders map[reflect2.Type]ValEncoder
+ decoders map[reflect2.Type]ValDecoder
+}
+
+func (b *ctx) caseSensitive() bool {
+ if b.frozenConfig == nil {
+ // default is case-insensitive
+ return false
+ }
+ return b.frozenConfig.caseSensitive
+}
+
+func (b *ctx) append(prefix string) *ctx {
+ return &ctx{
+ frozenConfig: b.frozenConfig,
+ prefix: b.prefix + " " + prefix,
+ encoders: b.encoders,
+ decoders: b.decoders,
+ }
+}
+
+// ReadVal copies the underlying JSON into a Go value, same as json.Unmarshal.
+func (iter *Iterator) ReadVal(obj interface{}) {
+ cacheKey := reflect2.RTypeOf(obj)
+ decoder := iter.cfg.getDecoderFromCache(cacheKey)
+ if decoder == nil {
+ typ := reflect2.TypeOf(obj)
+ if typ.Kind() != reflect.Ptr {
+ iter.ReportError("ReadVal", "can only unmarshal into pointer")
+ return
+ }
+ decoder = iter.cfg.DecoderOf(typ)
+ }
+ ptr := reflect2.PtrOf(obj)
+ if ptr == nil {
+ iter.ReportError("ReadVal", "can not read into nil pointer")
+ return
+ }
+ decoder.Decode(ptr, iter)
+}
+
+// WriteVal copies the Go value into the underlying JSON, same as json.Marshal.
+func (stream *Stream) WriteVal(val interface{}) {
+ if nil == val {
+ stream.WriteNil()
+ return
+ }
+ cacheKey := reflect2.RTypeOf(val)
+ encoder := stream.cfg.getEncoderFromCache(cacheKey)
+ if encoder == nil {
+ typ := reflect2.TypeOf(val)
+ encoder = stream.cfg.EncoderOf(typ)
+ }
+ encoder.Encode(reflect2.PtrOf(val), stream)
+}
+
+func (cfg *frozenConfig) DecoderOf(typ reflect2.Type) ValDecoder {
+ cacheKey := typ.RType()
+ decoder := cfg.getDecoderFromCache(cacheKey)
+ if decoder != nil {
+ return decoder
+ }
+ ctx := &ctx{
+ frozenConfig: cfg,
+ prefix: "",
+ decoders: map[reflect2.Type]ValDecoder{},
+ encoders: map[reflect2.Type]ValEncoder{},
+ }
+ ptrType := typ.(*reflect2.UnsafePtrType)
+ decoder = decoderOfType(ctx, ptrType.Elem())
+ cfg.addDecoderToCache(cacheKey, decoder)
+ return decoder
+}
+
+func decoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
+ decoder := getTypeDecoderFromExtension(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ decoder = createDecoderOfType(ctx, typ)
+ for _, extension := range extensions {
+ decoder = extension.DecorateDecoder(typ, decoder)
+ }
+ decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
+ for _, extension := range ctx.extraExtensions {
+ decoder = extension.DecorateDecoder(typ, decoder)
+ }
+ return decoder
+}
+
+func createDecoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
+ decoder := ctx.decoders[typ]
+ if decoder != nil {
+ return decoder
+ }
+ placeholder := &placeholderDecoder{}
+ ctx.decoders[typ] = placeholder
+ decoder = _createDecoderOfType(ctx, typ)
+ placeholder.decoder = decoder
+ return decoder
+}
+
+func _createDecoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
+ decoder := createDecoderOfJsonRawMessage(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ decoder = createDecoderOfJsonNumber(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ decoder = createDecoderOfMarshaler(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ decoder = createDecoderOfAny(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ decoder = createDecoderOfNative(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ switch typ.Kind() {
+ case reflect.Interface:
+ ifaceType, isIFace := typ.(*reflect2.UnsafeIFaceType)
+ if isIFace {
+ return &ifaceDecoder{valType: ifaceType}
+ }
+ return &efaceDecoder{}
+ case reflect.Struct:
+ return decoderOfStruct(ctx, typ)
+ case reflect.Array:
+ return decoderOfArray(ctx, typ)
+ case reflect.Slice:
+ return decoderOfSlice(ctx, typ)
+ case reflect.Map:
+ return decoderOfMap(ctx, typ)
+ case reflect.Ptr:
+ return decoderOfOptional(ctx, typ)
+ default:
+ return &lazyErrorDecoder{err: fmt.Errorf("%s%s is unsupported type", ctx.prefix, typ.String())}
+ }
+}
+
+func (cfg *frozenConfig) EncoderOf(typ reflect2.Type) ValEncoder {
+ cacheKey := typ.RType()
+ encoder := cfg.getEncoderFromCache(cacheKey)
+ if encoder != nil {
+ return encoder
+ }
+ ctx := &ctx{
+ frozenConfig: cfg,
+ prefix: "",
+ decoders: map[reflect2.Type]ValDecoder{},
+ encoders: map[reflect2.Type]ValEncoder{},
+ }
+ encoder = encoderOfType(ctx, typ)
+ if typ.LikePtr() {
+ encoder = &onePtrEncoder{encoder}
+ }
+ cfg.addEncoderToCache(cacheKey, encoder)
+ return encoder
+}
+
+type onePtrEncoder struct {
+ encoder ValEncoder
+}
+
+func (encoder *onePtrEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.encoder.IsEmpty(unsafe.Pointer(&ptr))
+}
+
+func (encoder *onePtrEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ encoder.encoder.Encode(unsafe.Pointer(&ptr), stream)
+}
+
+func encoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
+ encoder := getTypeEncoderFromExtension(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ encoder = createEncoderOfType(ctx, typ)
+ for _, extension := range extensions {
+ encoder = extension.DecorateEncoder(typ, encoder)
+ }
+ encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
+ for _, extension := range ctx.extraExtensions {
+ encoder = extension.DecorateEncoder(typ, encoder)
+ }
+ return encoder
+}
+
+func createEncoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
+ encoder := ctx.encoders[typ]
+ if encoder != nil {
+ return encoder
+ }
+ placeholder := &placeholderEncoder{}
+ ctx.encoders[typ] = placeholder
+ encoder = _createEncoderOfType(ctx, typ)
+ placeholder.encoder = encoder
+ return encoder
+}
+
+func _createEncoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
+ encoder := createEncoderOfJsonRawMessage(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ encoder = createEncoderOfJsonNumber(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ encoder = createEncoderOfMarshaler(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ encoder = createEncoderOfAny(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ encoder = createEncoderOfNative(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ kind := typ.Kind()
+ switch kind {
+ case reflect.Interface:
+ return &dynamicEncoder{typ}
+ case reflect.Struct:
+ return encoderOfStruct(ctx, typ)
+ case reflect.Array:
+ return encoderOfArray(ctx, typ)
+ case reflect.Slice:
+ return encoderOfSlice(ctx, typ)
+ case reflect.Map:
+ return encoderOfMap(ctx, typ)
+ case reflect.Ptr:
+ return encoderOfOptional(ctx, typ)
+ default:
+ return &lazyErrorEncoder{err: fmt.Errorf("%s%s is unsupported type", ctx.prefix, typ.String())}
+ }
+}
+
+type lazyErrorDecoder struct {
+ err error
+}
+
+func (decoder *lazyErrorDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.WhatIsNext() != NilValue {
+ if iter.Error == nil {
+ iter.Error = decoder.err
+ }
+ } else {
+ iter.Skip()
+ }
+}
+
+type lazyErrorEncoder struct {
+ err error
+}
+
+func (encoder *lazyErrorEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if ptr == nil {
+ stream.WriteNil()
+ } else if stream.Error == nil {
+ stream.Error = encoder.err
+ }
+}
+
+func (encoder *lazyErrorEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
+
+type placeholderDecoder struct {
+ decoder ValDecoder
+}
+
+func (decoder *placeholderDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.decoder.Decode(ptr, iter)
+}
+
+type placeholderEncoder struct {
+ encoder ValEncoder
+}
+
+func (encoder *placeholderEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ encoder.encoder.Encode(ptr, stream)
+}
+
+func (encoder *placeholderEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.encoder.IsEmpty(ptr)
+}
diff --git a/vendor/github.com/json-iterator/go/reflect_array.go b/vendor/github.com/json-iterator/go/reflect_array.go
new file mode 100644
index 000000000..13a0b7b08
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_array.go
@@ -0,0 +1,104 @@
+package jsoniter
+
+import (
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "io"
+ "unsafe"
+)
+
+func decoderOfArray(ctx *ctx, typ reflect2.Type) ValDecoder {
+ arrayType := typ.(*reflect2.UnsafeArrayType)
+ decoder := decoderOfType(ctx.append("[arrayElem]"), arrayType.Elem())
+ return &arrayDecoder{arrayType, decoder}
+}
+
+func encoderOfArray(ctx *ctx, typ reflect2.Type) ValEncoder {
+ arrayType := typ.(*reflect2.UnsafeArrayType)
+ if arrayType.Len() == 0 {
+ return emptyArrayEncoder{}
+ }
+ encoder := encoderOfType(ctx.append("[arrayElem]"), arrayType.Elem())
+ return &arrayEncoder{arrayType, encoder}
+}
+
+type emptyArrayEncoder struct{}
+
+func (encoder emptyArrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteEmptyArray()
+}
+
+func (encoder emptyArrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return true
+}
+
+type arrayEncoder struct {
+ arrayType *reflect2.UnsafeArrayType
+ elemEncoder ValEncoder
+}
+
+func (encoder *arrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteArrayStart()
+ elemPtr := unsafe.Pointer(ptr)
+ encoder.elemEncoder.Encode(elemPtr, stream)
+ for i := 1; i < encoder.arrayType.Len(); i++ {
+ stream.WriteMore()
+ elemPtr = encoder.arrayType.UnsafeGetIndex(ptr, i)
+ encoder.elemEncoder.Encode(elemPtr, stream)
+ }
+ stream.WriteArrayEnd()
+ if stream.Error != nil && stream.Error != io.EOF {
+ stream.Error = fmt.Errorf("%v: %s", encoder.arrayType, stream.Error.Error())
+ }
+}
+
+func (encoder *arrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
+
+type arrayDecoder struct {
+ arrayType *reflect2.UnsafeArrayType
+ elemDecoder ValDecoder
+}
+
+func (decoder *arrayDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.doDecode(ptr, iter)
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.arrayType, iter.Error.Error())
+ }
+}
+
+func (decoder *arrayDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
+ c := iter.nextToken()
+ arrayType := decoder.arrayType
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return
+ }
+ if c != '[' {
+ iter.ReportError("decode array", "expect [ or n, but found "+string([]byte{c}))
+ return
+ }
+ c = iter.nextToken()
+ if c == ']' {
+ return
+ }
+ iter.unreadByte()
+ elemPtr := arrayType.UnsafeGetIndex(ptr, 0)
+ decoder.elemDecoder.Decode(elemPtr, iter)
+ length := 1
+ for c = iter.nextToken(); c == ','; c = iter.nextToken() {
+ if length >= arrayType.Len() {
+ iter.Skip()
+ continue
+ }
+ idx := length
+ length++
+ elemPtr = arrayType.UnsafeGetIndex(ptr, idx)
+ decoder.elemDecoder.Decode(elemPtr, iter)
+ }
+ if c != ']' {
+ iter.ReportError("decode array", "expect ], but found "+string([]byte{c}))
+ return
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/reflect_dynamic.go b/vendor/github.com/json-iterator/go/reflect_dynamic.go
new file mode 100644
index 000000000..8b6bc8b43
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_dynamic.go
@@ -0,0 +1,70 @@
+package jsoniter
+
+import (
+ "github.com/modern-go/reflect2"
+ "reflect"
+ "unsafe"
+)
+
+type dynamicEncoder struct {
+ valType reflect2.Type
+}
+
+func (encoder *dynamicEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ obj := encoder.valType.UnsafeIndirect(ptr)
+ stream.WriteVal(obj)
+}
+
+func (encoder *dynamicEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.valType.UnsafeIndirect(ptr) == nil
+}
+
+type efaceDecoder struct {
+}
+
+func (decoder *efaceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ pObj := (*interface{})(ptr)
+ obj := *pObj
+ if obj == nil {
+ *pObj = iter.Read()
+ return
+ }
+ typ := reflect2.TypeOf(obj)
+ if typ.Kind() != reflect.Ptr {
+ *pObj = iter.Read()
+ return
+ }
+ ptrType := typ.(*reflect2.UnsafePtrType)
+ ptrElemType := ptrType.Elem()
+ if iter.WhatIsNext() == NilValue {
+ if ptrElemType.Kind() != reflect.Ptr {
+ iter.skipFourBytes('n', 'u', 'l', 'l')
+ *pObj = nil
+ return
+ }
+ }
+ if reflect2.IsNil(obj) {
+ obj := ptrElemType.New()
+ iter.ReadVal(obj)
+ *pObj = obj
+ return
+ }
+ iter.ReadVal(obj)
+}
+
+type ifaceDecoder struct {
+ valType *reflect2.UnsafeIFaceType
+}
+
+func (decoder *ifaceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.ReadNil() {
+ decoder.valType.UnsafeSet(ptr, decoder.valType.UnsafeNew())
+ return
+ }
+ obj := decoder.valType.UnsafeIndirect(ptr)
+ if reflect2.IsNil(obj) {
+ iter.ReportError("decode non empty interface", "can not unmarshal into nil")
+ return
+ }
+ iter.ReadVal(obj)
+}
diff --git a/vendor/github.com/json-iterator/go/reflect_extension.go b/vendor/github.com/json-iterator/go/reflect_extension.go
new file mode 100644
index 000000000..04f68756b
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_extension.go
@@ -0,0 +1,483 @@
+package jsoniter
+
+import (
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "reflect"
+ "sort"
+ "strings"
+ "unicode"
+ "unsafe"
+)
+
+var typeDecoders = map[string]ValDecoder{}
+var fieldDecoders = map[string]ValDecoder{}
+var typeEncoders = map[string]ValEncoder{}
+var fieldEncoders = map[string]ValEncoder{}
+var extensions = []Extension{}
+
+// StructDescriptor describes how we should encode/decode the struct.
+type StructDescriptor struct {
+ Type reflect2.Type
+ Fields []*Binding
+}
+
+// GetField gets one field from the descriptor by its name.
+// A map cannot be used here, in order to preserve field order.
+func (structDescriptor *StructDescriptor) GetField(fieldName string) *Binding {
+ for _, binding := range structDescriptor.Fields {
+ if binding.Field.Name() == fieldName {
+ return binding
+ }
+ }
+ return nil
+}
+
+// Binding describes how we should encode/decode a struct field.
+type Binding struct {
+ levels []int
+ Field reflect2.StructField
+ FromNames []string
+ ToNames []string
+ Encoder ValEncoder
+ Decoder ValDecoder
+}
+
+// Extension is the one-for-all SPI. Customize encoding/decoding by specifying an alternate encoder/decoder.
+// Fields can also be renamed via UpdateStructDescriptor.
+type Extension interface {
+ UpdateStructDescriptor(structDescriptor *StructDescriptor)
+ CreateMapKeyDecoder(typ reflect2.Type) ValDecoder
+ CreateMapKeyEncoder(typ reflect2.Type) ValEncoder
+ CreateDecoder(typ reflect2.Type) ValDecoder
+ CreateEncoder(typ reflect2.Type) ValEncoder
+ DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder
+ DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder
+}
+
+// DummyExtension: embed this type to get no-op implementations of all Extension methods.
+type DummyExtension struct {
+}
+
+// UpdateStructDescriptor No-op
+func (extension *DummyExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
+}
+
+// CreateMapKeyDecoder No-op
+func (extension *DummyExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
+ return nil
+}
+
+// CreateMapKeyEncoder No-op
+func (extension *DummyExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
+ return nil
+}
+
+// CreateDecoder No-op
+func (extension *DummyExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
+ return nil
+}
+
+// CreateEncoder No-op
+func (extension *DummyExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
+ return nil
+}
+
+// DecorateDecoder No-op
+func (extension *DummyExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
+ return decoder
+}
+
+// DecorateEncoder No-op
+func (extension *DummyExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
+ return encoder
+}
+
+// EncoderExtension is an Extension that creates encoders from a type-to-encoder map.
+type EncoderExtension map[reflect2.Type]ValEncoder
+
+// UpdateStructDescriptor No-op
+func (extension EncoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
+}
+
+// CreateDecoder No-op
+func (extension EncoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
+ return nil
+}
+
+// CreateEncoder gets the encoder from the map.
+func (extension EncoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
+ return extension[typ]
+}
+
+// CreateMapKeyDecoder No-op
+func (extension EncoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
+ return nil
+}
+
+// CreateMapKeyEncoder No-op
+func (extension EncoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
+ return nil
+}
+
+// DecorateDecoder No-op
+func (extension EncoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
+ return decoder
+}
+
+// DecorateEncoder No-op
+func (extension EncoderExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
+ return encoder
+}
+
+// DecoderExtension is an Extension that creates decoders from a type-to-decoder map.
+type DecoderExtension map[reflect2.Type]ValDecoder
+
+// UpdateStructDescriptor No-op
+func (extension DecoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
+}
+
+// CreateMapKeyDecoder No-op
+func (extension DecoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
+ return nil
+}
+
+// CreateMapKeyEncoder No-op
+func (extension DecoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
+ return nil
+}
+
+// CreateDecoder gets the decoder from the map.
+func (extension DecoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
+ return extension[typ]
+}
+
+// CreateEncoder No-op
+func (extension DecoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
+ return nil
+}
+
+// DecorateDecoder No-op
+func (extension DecoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
+ return decoder
+}
+
+// DecorateEncoder No-op
+func (extension DecoderExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
+ return encoder
+}
+
+type funcDecoder struct {
+ fun DecoderFunc
+}
+
+func (decoder *funcDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.fun(ptr, iter)
+}
+
+type funcEncoder struct {
+ fun EncoderFunc
+ isEmptyFunc func(ptr unsafe.Pointer) bool
+}
+
+func (encoder *funcEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ encoder.fun(ptr, stream)
+}
+
+func (encoder *funcEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ if encoder.isEmptyFunc == nil {
+ return false
+ }
+ return encoder.isEmptyFunc(ptr)
+}
+
+// DecoderFunc is the function form of a TypeDecoder.
+type DecoderFunc func(ptr unsafe.Pointer, iter *Iterator)
+
+// EncoderFunc is the function form of a TypeEncoder.
+type EncoderFunc func(ptr unsafe.Pointer, stream *Stream)
+
+// RegisterTypeDecoderFunc registers a TypeDecoder for a type using a function.
+func RegisterTypeDecoderFunc(typ string, fun DecoderFunc) {
+ typeDecoders[typ] = &funcDecoder{fun}
+}
+
+// RegisterTypeDecoder registers a TypeDecoder for a type.
+func RegisterTypeDecoder(typ string, decoder ValDecoder) {
+ typeDecoders[typ] = decoder
+}
+
+// RegisterFieldDecoderFunc registers a TypeDecoder for a struct field using a function.
+func RegisterFieldDecoderFunc(typ string, field string, fun DecoderFunc) {
+ RegisterFieldDecoder(typ, field, &funcDecoder{fun})
+}
+
+// RegisterFieldDecoder registers a TypeDecoder for a struct field.
+func RegisterFieldDecoder(typ string, field string, decoder ValDecoder) {
+ fieldDecoders[fmt.Sprintf("%s/%s", typ, field)] = decoder
+}
+
+// RegisterTypeEncoderFunc registers a TypeEncoder for a type using encode/isEmpty functions.
+func RegisterTypeEncoderFunc(typ string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) {
+ typeEncoders[typ] = &funcEncoder{fun, isEmptyFunc}
+}
+
+// RegisterTypeEncoder registers a TypeEncoder for a type.
+func RegisterTypeEncoder(typ string, encoder ValEncoder) {
+ typeEncoders[typ] = encoder
+}
+
+// RegisterFieldEncoderFunc registers a TypeEncoder for a struct field using encode/isEmpty functions.
+func RegisterFieldEncoderFunc(typ string, field string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) {
+ RegisterFieldEncoder(typ, field, &funcEncoder{fun, isEmptyFunc})
+}
+
+// RegisterFieldEncoder registers a TypeEncoder for a struct field.
+func RegisterFieldEncoder(typ string, field string, encoder ValEncoder) {
+ fieldEncoders[fmt.Sprintf("%s/%s", typ, field)] = encoder
+}
+
+// RegisterExtension registers an extension.
+func RegisterExtension(extension Extension) {
+ extensions = append(extensions, extension)
+}
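+
+// Registration sketch (mypkg.User and its Name field are hypothetical):
+//
+//    RegisterFieldDecoderFunc("mypkg.User", "Name",
+//        func(ptr unsafe.Pointer, iter *Iterator) {
+//            *(*string)(ptr) = iter.ReadString()
+//        })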
+
+func getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
+ decoder := _getTypeDecoderFromExtension(ctx, typ)
+ if decoder != nil {
+ for _, extension := range extensions {
+ decoder = extension.DecorateDecoder(typ, decoder)
+ }
+ decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
+ for _, extension := range ctx.extraExtensions {
+ decoder = extension.DecorateDecoder(typ, decoder)
+ }
+ }
+ return decoder
+}
+
+func _getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
+ for _, extension := range extensions {
+ decoder := extension.CreateDecoder(typ)
+ if decoder != nil {
+ return decoder
+ }
+ }
+ decoder := ctx.decoderExtension.CreateDecoder(typ)
+ if decoder != nil {
+ return decoder
+ }
+ for _, extension := range ctx.extraExtensions {
+ decoder := extension.CreateDecoder(typ)
+ if decoder != nil {
+ return decoder
+ }
+ }
+ typeName := typ.String()
+ decoder = typeDecoders[typeName]
+ if decoder != nil {
+ return decoder
+ }
+ if typ.Kind() == reflect.Ptr {
+ ptrType := typ.(*reflect2.UnsafePtrType)
+ decoder := typeDecoders[ptrType.Elem().String()]
+ if decoder != nil {
+ return &OptionalDecoder{ptrType.Elem(), decoder}
+ }
+ }
+ return nil
+}
+
+func getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
+ encoder := _getTypeEncoderFromExtension(ctx, typ)
+ if encoder != nil {
+ for _, extension := range extensions {
+ encoder = extension.DecorateEncoder(typ, encoder)
+ }
+ encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
+ for _, extension := range ctx.extraExtensions {
+ encoder = extension.DecorateEncoder(typ, encoder)
+ }
+ }
+ return encoder
+}
+
+func _getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
+ for _, extension := range extensions {
+ encoder := extension.CreateEncoder(typ)
+ if encoder != nil {
+ return encoder
+ }
+ }
+ encoder := ctx.encoderExtension.CreateEncoder(typ)
+ if encoder != nil {
+ return encoder
+ }
+ for _, extension := range ctx.extraExtensions {
+ encoder := extension.CreateEncoder(typ)
+ if encoder != nil {
+ return encoder
+ }
+ }
+ typeName := typ.String()
+ encoder = typeEncoders[typeName]
+ if encoder != nil {
+ return encoder
+ }
+ if typ.Kind() == reflect.Ptr {
+ typePtr := typ.(*reflect2.UnsafePtrType)
+ encoder := typeEncoders[typePtr.Elem().String()]
+ if encoder != nil {
+ return &OptionalEncoder{encoder}
+ }
+ }
+ return nil
+}
+
+func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor {
+ structType := typ.(*reflect2.UnsafeStructType)
+ embeddedBindings := []*Binding{}
+ bindings := []*Binding{}
+ for i := 0; i < structType.NumField(); i++ {
+ field := structType.Field(i)
+ tag, hastag := field.Tag().Lookup(ctx.getTagKey())
+ if ctx.onlyTaggedField && !hastag {
+ continue
+ }
+ tagParts := strings.Split(tag, ",")
+ if tag == "-" {
+ continue
+ }
+ if field.Anonymous() && (tag == "" || tagParts[0] == "") {
+ if field.Type().Kind() == reflect.Struct {
+ structDescriptor := describeStruct(ctx, field.Type())
+ for _, binding := range structDescriptor.Fields {
+ binding.levels = append([]int{i}, binding.levels...)
+ omitempty := binding.Encoder.(*structFieldEncoder).omitempty
+ binding.Encoder = &structFieldEncoder{field, binding.Encoder, omitempty}
+ binding.Decoder = &structFieldDecoder{field, binding.Decoder}
+ embeddedBindings = append(embeddedBindings, binding)
+ }
+ continue
+ } else if field.Type().Kind() == reflect.Ptr {
+ ptrType := field.Type().(*reflect2.UnsafePtrType)
+ if ptrType.Elem().Kind() == reflect.Struct {
+ structDescriptor := describeStruct(ctx, ptrType.Elem())
+ for _, binding := range structDescriptor.Fields {
+ binding.levels = append([]int{i}, binding.levels...)
+ omitempty := binding.Encoder.(*structFieldEncoder).omitempty
+ binding.Encoder = &dereferenceEncoder{binding.Encoder}
+ binding.Encoder = &structFieldEncoder{field, binding.Encoder, omitempty}
+ binding.Decoder = &dereferenceDecoder{ptrType.Elem(), binding.Decoder}
+ binding.Decoder = &structFieldDecoder{field, binding.Decoder}
+ embeddedBindings = append(embeddedBindings, binding)
+ }
+ continue
+ }
+ }
+ }
+ fieldNames := calcFieldNames(field.Name(), tagParts[0], tag)
+ fieldCacheKey := fmt.Sprintf("%s/%s", typ.String(), field.Name())
+ decoder := fieldDecoders[fieldCacheKey]
+ if decoder == nil {
+ decoder = decoderOfType(ctx.append(field.Name()), field.Type())
+ }
+ encoder := fieldEncoders[fieldCacheKey]
+ if encoder == nil {
+ encoder = encoderOfType(ctx.append(field.Name()), field.Type())
+ }
+ binding := &Binding{
+ Field: field,
+ FromNames: fieldNames,
+ ToNames: fieldNames,
+ Decoder: decoder,
+ Encoder: encoder,
+ }
+ binding.levels = []int{i}
+ bindings = append(bindings, binding)
+ }
+ return createStructDescriptor(ctx, typ, bindings, embeddedBindings)
+}
+
+func createStructDescriptor(ctx *ctx, typ reflect2.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor {
+ structDescriptor := &StructDescriptor{
+ Type: typ,
+ Fields: bindings,
+ }
+ for _, extension := range extensions {
+ extension.UpdateStructDescriptor(structDescriptor)
+ }
+ ctx.encoderExtension.UpdateStructDescriptor(structDescriptor)
+ ctx.decoderExtension.UpdateStructDescriptor(structDescriptor)
+ for _, extension := range ctx.extraExtensions {
+ extension.UpdateStructDescriptor(structDescriptor)
+ }
+ processTags(structDescriptor, ctx.frozenConfig)
+ // merge normal & embedded bindings & sort with original order
+ allBindings := sortableBindings(append(embeddedBindings, structDescriptor.Fields...))
+ sort.Sort(allBindings)
+ structDescriptor.Fields = allBindings
+ return structDescriptor
+}
+
+type sortableBindings []*Binding
+
+func (bindings sortableBindings) Len() int {
+ return len(bindings)
+}
+
+func (bindings sortableBindings) Less(i, j int) bool {
+ left := bindings[i].levels
+ right := bindings[j].levels
+ k := 0
+ for {
+ if left[k] < right[k] {
+ return true
+ } else if left[k] > right[k] {
+ return false
+ }
+ k++
+ }
+}
+
+func (bindings sortableBindings) Swap(i, j int) {
+ bindings[i], bindings[j] = bindings[j], bindings[i]
+}
+
+func processTags(structDescriptor *StructDescriptor, cfg *frozenConfig) {
+ for _, binding := range structDescriptor.Fields {
+ shouldOmitEmpty := false
+ tagParts := strings.Split(binding.Field.Tag().Get(cfg.getTagKey()), ",")
+ for _, tagPart := range tagParts[1:] {
+ if tagPart == "omitempty" {
+ shouldOmitEmpty = true
+ } else if tagPart == "string" {
+ if binding.Field.Type().Kind() == reflect.String {
+ binding.Decoder = &stringModeStringDecoder{binding.Decoder, cfg}
+ binding.Encoder = &stringModeStringEncoder{binding.Encoder, cfg}
+ } else {
+ binding.Decoder = &stringModeNumberDecoder{binding.Decoder}
+ binding.Encoder = &stringModeNumberEncoder{binding.Encoder}
+ }
+ }
+ }
+ binding.Decoder = &structFieldDecoder{binding.Field, binding.Decoder}
+ binding.Encoder = &structFieldEncoder{binding.Field, binding.Encoder, shouldOmitEmpty}
+ }
+}
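+
+// For example, an int field tagged `json:",string"` is wrapped by the
+// stringModeNumber codecs, so it encodes as "42" and decodes from a quoted
+// number.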
+
+func calcFieldNames(originalFieldName string, tagProvidedFieldName string, wholeTag string) []string {
+ // ignore?
+ if wholeTag == "-" {
+ return []string{}
+ }
+ // rename?
+ var fieldNames []string
+ if tagProvidedFieldName == "" {
+ fieldNames = []string{originalFieldName}
+ } else {
+ fieldNames = []string{tagProvidedFieldName}
+ }
+ // private?
+ isNotExported := unicode.IsLower(rune(originalFieldName[0]))
+ if isNotExported {
+ fieldNames = []string{}
+ }
+ return fieldNames
+}
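+
+// For example (tag key "json"): a tag-provided name "name" yields ["name"],
+// a whole tag of "-" yields [], and an unexported field also yields [].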
diff --git a/vendor/github.com/json-iterator/go/reflect_json_number.go b/vendor/github.com/json-iterator/go/reflect_json_number.go
new file mode 100644
index 000000000..98d45c1ec
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_json_number.go
@@ -0,0 +1,112 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "github.com/modern-go/reflect2"
+ "strconv"
+ "unsafe"
+)
+
+// Number represents a JSON number literal, mirroring json.Number.
+type Number string
+
+// String returns the literal text of the number.
+func (n Number) String() string { return string(n) }
+
+// Float64 returns the number as a float64.
+func (n Number) Float64() (float64, error) {
+ return strconv.ParseFloat(string(n), 64)
+}
+
+// Int64 returns the number as an int64.
+func (n Number) Int64() (int64, error) {
+ return strconv.ParseInt(string(n), 10, 64)
+}
+
+// CastJsonNumber returns the literal text of val when it is a json.Number
+// or a jsoniter Number, and false otherwise.
+func CastJsonNumber(val interface{}) (string, bool) {
+ switch typedVal := val.(type) {
+ case json.Number:
+ return string(typedVal), true
+ case Number:
+ return string(typedVal), true
+ }
+ return "", false
+}
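+
+// For example, CastJsonNumber(json.Number("3.14")) returns ("3.14", true),
+// while CastJsonNumber("3.14") returns ("", false) because a plain string is
+// neither number type.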
+
+var jsonNumberType = reflect2.TypeOfPtr((*json.Number)(nil)).Elem()
+var jsoniterNumberType = reflect2.TypeOfPtr((*Number)(nil)).Elem()
+
+func createDecoderOfJsonNumber(ctx *ctx, typ reflect2.Type) ValDecoder {
+ if typ.AssignableTo(jsonNumberType) {
+ return &jsonNumberCodec{}
+ }
+ if typ.AssignableTo(jsoniterNumberType) {
+ return &jsoniterNumberCodec{}
+ }
+ return nil
+}
+
+func createEncoderOfJsonNumber(ctx *ctx, typ reflect2.Type) ValEncoder {
+ if typ.AssignableTo(jsonNumberType) {
+ return &jsonNumberCodec{}
+ }
+ if typ.AssignableTo(jsoniterNumberType) {
+ return &jsoniterNumberCodec{}
+ }
+ return nil
+}
+
+type jsonNumberCodec struct {
+}
+
+func (codec *jsonNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ switch iter.WhatIsNext() {
+ case StringValue:
+ *((*json.Number)(ptr)) = json.Number(iter.ReadString())
+ case NilValue:
+ iter.skipFourBytes('n', 'u', 'l', 'l')
+ *((*json.Number)(ptr)) = ""
+ default:
+ *((*json.Number)(ptr)) = json.Number(iter.readNumberAsString())
+ }
+}
+
+func (codec *jsonNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ number := *((*json.Number)(ptr))
+ if len(number) == 0 {
+ stream.writeByte('0')
+ } else {
+ stream.WriteRaw(string(number))
+ }
+}
+
+func (codec *jsonNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*json.Number)(ptr))) == 0
+}
+
+type jsoniterNumberCodec struct {
+}
+
+func (codec *jsoniterNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ switch iter.WhatIsNext() {
+ case StringValue:
+ *((*Number)(ptr)) = Number(iter.ReadString())
+ case NilValue:
+ iter.skipFourBytes('n', 'u', 'l', 'l')
+ *((*Number)(ptr)) = ""
+ default:
+ *((*Number)(ptr)) = Number(iter.readNumberAsString())
+ }
+}
+
+func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ number := *((*Number)(ptr))
+ if len(number) == 0 {
+ stream.writeByte('0')
+ } else {
+ stream.WriteRaw(string(number))
+ }
+}
+
+func (codec *jsoniterNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*Number)(ptr))) == 0
+}
diff --git a/vendor/github.com/json-iterator/go/reflect_json_raw_message.go b/vendor/github.com/json-iterator/go/reflect_json_raw_message.go
new file mode 100644
index 000000000..f2619936c
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_json_raw_message.go
@@ -0,0 +1,60 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "github.com/modern-go/reflect2"
+ "unsafe"
+)
+
+var jsonRawMessageType = reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()
+var jsoniterRawMessageType = reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()
+
+func createEncoderOfJsonRawMessage(ctx *ctx, typ reflect2.Type) ValEncoder {
+ if typ == jsonRawMessageType {
+ return &jsonRawMessageCodec{}
+ }
+ if typ == jsoniterRawMessageType {
+ return &jsoniterRawMessageCodec{}
+ }
+ return nil
+}
+
+func createDecoderOfJsonRawMessage(ctx *ctx, typ reflect2.Type) ValDecoder {
+ if typ == jsonRawMessageType {
+ return &jsonRawMessageCodec{}
+ }
+ if typ == jsoniterRawMessageType {
+ return &jsoniterRawMessageCodec{}
+ }
+ return nil
+}
+
+type jsonRawMessageCodec struct {
+}
+
+func (codec *jsonRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ *((*json.RawMessage)(ptr)) = json.RawMessage(iter.SkipAndReturnBytes())
+}
+
+func (codec *jsonRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteRaw(string(*((*json.RawMessage)(ptr))))
+}
+
+func (codec *jsonRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*json.RawMessage)(ptr))) == 0
+}
+
+type jsoniterRawMessageCodec struct {
+}
+
+func (codec *jsoniterRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ *((*RawMessage)(ptr)) = RawMessage(iter.SkipAndReturnBytes())
+}
+
+func (codec *jsoniterRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteRaw(string(*((*RawMessage)(ptr))))
+}
+
+func (codec *jsoniterRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*RawMessage)(ptr))) == 0
+}
diff --git a/vendor/github.com/json-iterator/go/reflect_map.go b/vendor/github.com/json-iterator/go/reflect_map.go
new file mode 100644
index 000000000..7f66a88b0
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_map.go
@@ -0,0 +1,326 @@
+package jsoniter
+
+import (
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "io"
+ "reflect"
+ "sort"
+ "unsafe"
+)
+
+func decoderOfMap(ctx *ctx, typ reflect2.Type) ValDecoder {
+ mapType := typ.(*reflect2.UnsafeMapType)
+ keyDecoder := decoderOfMapKey(ctx.append("[mapKey]"), mapType.Key())
+ elemDecoder := decoderOfType(ctx.append("[mapElem]"), mapType.Elem())
+ return &mapDecoder{
+ mapType: mapType,
+ keyType: mapType.Key(),
+ elemType: mapType.Elem(),
+ keyDecoder: keyDecoder,
+ elemDecoder: elemDecoder,
+ }
+}
+
+func encoderOfMap(ctx *ctx, typ reflect2.Type) ValEncoder {
+ mapType := typ.(*reflect2.UnsafeMapType)
+ if ctx.sortMapKeys {
+ return &sortKeysMapEncoder{
+ mapType: mapType,
+ keyEncoder: encoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()),
+ elemEncoder: encoderOfType(ctx.append("[mapElem]"), mapType.Elem()),
+ }
+ }
+ return &mapEncoder{
+ mapType: mapType,
+ keyEncoder: encoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()),
+ elemEncoder: encoderOfType(ctx.append("[mapElem]"), mapType.Elem()),
+ }
+}
+
+func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
+ decoder := ctx.decoderExtension.CreateMapKeyDecoder(typ)
+ if decoder != nil {
+ return decoder
+ }
+ for _, extension := range ctx.extraExtensions {
+ decoder := extension.CreateMapKeyDecoder(typ)
+ if decoder != nil {
+ return decoder
+ }
+ }
+ switch typ.Kind() {
+ case reflect.String:
+ return decoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
+ case reflect.Bool,
+ reflect.Uint8, reflect.Int8,
+ reflect.Uint16, reflect.Int16,
+ reflect.Uint32, reflect.Int32,
+ reflect.Uint64, reflect.Int64,
+ reflect.Uint, reflect.Int,
+ reflect.Float32, reflect.Float64,
+ reflect.Uintptr:
+ typ = reflect2.DefaultTypeOfKind(typ.Kind())
+ return &numericMapKeyDecoder{decoderOfType(ctx, typ)}
+ default:
+ ptrType := reflect2.PtrTo(typ)
+ if ptrType.Implements(textMarshalerType) {
+ return &referenceDecoder{
+ &textUnmarshalerDecoder{
+ valType: ptrType,
+ },
+ }
+ }
+ if typ.Implements(textMarshalerType) {
+ return &textUnmarshalerDecoder{
+ valType: typ,
+ }
+ }
+ return &lazyErrorDecoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
+ }
+}
+
+func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder {
+ encoder := ctx.encoderExtension.CreateMapKeyEncoder(typ)
+ if encoder != nil {
+ return encoder
+ }
+ for _, extension := range ctx.extraExtensions {
+ encoder := extension.CreateMapKeyEncoder(typ)
+ if encoder != nil {
+ return encoder
+ }
+ }
+ switch typ.Kind() {
+ case reflect.String:
+ return encoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
+ case reflect.Bool,
+ reflect.Uint8, reflect.Int8,
+ reflect.Uint16, reflect.Int16,
+ reflect.Uint32, reflect.Int32,
+ reflect.Uint64, reflect.Int64,
+ reflect.Uint, reflect.Int,
+ reflect.Float32, reflect.Float64,
+ reflect.Uintptr:
+ typ = reflect2.DefaultTypeOfKind(typ.Kind())
+ return &numericMapKeyEncoder{encoderOfType(ctx, typ)}
+ default:
+ if typ == textMarshalerType {
+ return &directTextMarshalerEncoder{
+ stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
+ }
+ }
+ if typ.Implements(textMarshalerType) {
+ return &textMarshalerEncoder{
+ valType: typ,
+ stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
+ }
+ }
+ if typ.Kind() == reflect.Interface {
+ return &dynamicMapKeyEncoder{ctx, typ}
+ }
+ return &lazyErrorEncoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
+ }
+}
+
+type mapDecoder struct {
+ mapType *reflect2.UnsafeMapType
+ keyType reflect2.Type
+ elemType reflect2.Type
+ keyDecoder ValDecoder
+ elemDecoder ValDecoder
+}
+
+func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ mapType := decoder.mapType
+ c := iter.nextToken()
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ *(*unsafe.Pointer)(ptr) = nil
+ mapType.UnsafeSet(ptr, mapType.UnsafeNew())
+ return
+ }
+ if mapType.UnsafeIsNil(ptr) {
+ mapType.UnsafeSet(ptr, mapType.UnsafeMakeMap(0))
+ }
+ if c != '{' {
+ iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c}))
+ return
+ }
+ c = iter.nextToken()
+ if c == '}' {
+ return
+ }
+ if c != '"' {
+ iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
+ return
+ }
+ iter.unreadByte()
+ key := decoder.keyType.UnsafeNew()
+ decoder.keyDecoder.Decode(key, iter)
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+ return
+ }
+ elem := decoder.elemType.UnsafeNew()
+ decoder.elemDecoder.Decode(elem, iter)
+ decoder.mapType.UnsafeSetIndex(ptr, key, elem)
+ for c = iter.nextToken(); c == ','; c = iter.nextToken() {
+ key := decoder.keyType.UnsafeNew()
+ decoder.keyDecoder.Decode(key, iter)
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+ return
+ }
+ elem := decoder.elemType.UnsafeNew()
+ decoder.elemDecoder.Decode(elem, iter)
+ decoder.mapType.UnsafeSetIndex(ptr, key, elem)
+ }
+ if c != '}' {
+ iter.ReportError("ReadMapCB", `expect }, but found `+string([]byte{c}))
+ }
+}
+
+type numericMapKeyDecoder struct {
+ decoder ValDecoder
+}
+
+func (decoder *numericMapKeyDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ c := iter.nextToken()
+ if c != '"' {
+ iter.ReportError("ReadMapCB", `expect ", but found `+string([]byte{c}))
+ return
+ }
+ decoder.decoder.Decode(ptr, iter)
+ c = iter.nextToken()
+ if c != '"' {
+ iter.ReportError("ReadMapCB", `expect ", but found `+string([]byte{c}))
+ return
+ }
+}
+
+type numericMapKeyEncoder struct {
+ encoder ValEncoder
+}
+
+func (encoder *numericMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.writeByte('"')
+ encoder.encoder.Encode(ptr, stream)
+ stream.writeByte('"')
+}
+
+func (encoder *numericMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
+
+type dynamicMapKeyEncoder struct {
+ ctx *ctx
+ valType reflect2.Type
+}
+
+func (encoder *dynamicMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ obj := encoder.valType.UnsafeIndirect(ptr)
+ encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).Encode(reflect2.PtrOf(obj), stream)
+}
+
+func (encoder *dynamicMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ obj := encoder.valType.UnsafeIndirect(ptr)
+ return encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).IsEmpty(reflect2.PtrOf(obj))
+}
+
+type mapEncoder struct {
+ mapType *reflect2.UnsafeMapType
+ keyEncoder ValEncoder
+ elemEncoder ValEncoder
+}
+
+func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteObjectStart()
+ iter := encoder.mapType.UnsafeIterate(ptr)
+ for i := 0; iter.HasNext(); i++ {
+ if i != 0 {
+ stream.WriteMore()
+ }
+ key, elem := iter.UnsafeNext()
+ encoder.keyEncoder.Encode(key, stream)
+ if stream.indention > 0 {
+ stream.writeTwoBytes(byte(':'), byte(' '))
+ } else {
+ stream.writeByte(':')
+ }
+ encoder.elemEncoder.Encode(elem, stream)
+ }
+ stream.WriteObjectEnd()
+}
+
+func (encoder *mapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ iter := encoder.mapType.UnsafeIterate(ptr)
+ return !iter.HasNext()
+}
+
+type sortKeysMapEncoder struct {
+ mapType *reflect2.UnsafeMapType
+ keyEncoder ValEncoder
+ elemEncoder ValEncoder
+}
+
+func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if *(*unsafe.Pointer)(ptr) == nil {
+ stream.WriteNil()
+ return
+ }
+ stream.WriteObjectStart()
+ mapIter := encoder.mapType.UnsafeIterate(ptr)
+ subStream := stream.cfg.BorrowStream(nil)
+ subIter := stream.cfg.BorrowIterator(nil)
+ keyValues := encodedKeyValues{}
+ for mapIter.HasNext() {
+ subStream.buf = make([]byte, 0, 64)
+ key, elem := mapIter.UnsafeNext()
+ encoder.keyEncoder.Encode(key, subStream)
+ if subStream.Error != nil && subStream.Error != io.EOF && stream.Error == nil {
+ stream.Error = subStream.Error
+ }
+ encodedKey := subStream.Buffer()
+ subIter.ResetBytes(encodedKey)
+ decodedKey := subIter.ReadString()
+ if stream.indention > 0 {
+ subStream.writeTwoBytes(byte(':'), byte(' '))
+ } else {
+ subStream.writeByte(':')
+ }
+ encoder.elemEncoder.Encode(elem, subStream)
+ keyValues = append(keyValues, encodedKV{
+ key: decodedKey,
+ keyValue: subStream.Buffer(),
+ })
+ }
+ sort.Sort(keyValues)
+ for i, keyValue := range keyValues {
+ if i != 0 {
+ stream.WriteMore()
+ }
+ stream.Write(keyValue.keyValue)
+ }
+ stream.WriteObjectEnd()
+ stream.cfg.ReturnStream(subStream)
+ stream.cfg.ReturnIterator(subIter)
+}
+
+func (encoder *sortKeysMapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ iter := encoder.mapType.UnsafeIterate(ptr)
+ return !iter.HasNext()
+}
+
+type encodedKeyValues []encodedKV
+
+type encodedKV struct {
+ key string
+ keyValue []byte
+}
+
+func (sv encodedKeyValues) Len() int { return len(sv) }
+func (sv encodedKeyValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] }
+func (sv encodedKeyValues) Less(i, j int) bool { return sv[i].key < sv[j].key }
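+
+// With key sorting enabled (e.g. a Config with SortMapKeys set), the map
+// map[string]int{"b": 1, "a": 2} encodes deterministically as {"a":2,"b":1}.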
diff --git a/vendor/github.com/json-iterator/go/reflect_marshaler.go b/vendor/github.com/json-iterator/go/reflect_marshaler.go
new file mode 100644
index 000000000..58ac959ad
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_marshaler.go
@@ -0,0 +1,218 @@
+package jsoniter
+
+import (
+ "encoding"
+ "encoding/json"
+ "github.com/modern-go/reflect2"
+ "unsafe"
+)
+
+var marshalerType = reflect2.TypeOfPtr((*json.Marshaler)(nil)).Elem()
+var unmarshalerType = reflect2.TypeOfPtr((*json.Unmarshaler)(nil)).Elem()
+var textMarshalerType = reflect2.TypeOfPtr((*encoding.TextMarshaler)(nil)).Elem()
+var textUnmarshalerType = reflect2.TypeOfPtr((*encoding.TextUnmarshaler)(nil)).Elem()
+
+func createDecoderOfMarshaler(ctx *ctx, typ reflect2.Type) ValDecoder {
+ ptrType := reflect2.PtrTo(typ)
+ if ptrType.Implements(unmarshalerType) {
+ return &referenceDecoder{
+ &unmarshalerDecoder{ptrType},
+ }
+ }
+ if ptrType.Implements(textUnmarshalerType) {
+ return &referenceDecoder{
+ &textUnmarshalerDecoder{ptrType},
+ }
+ }
+ return nil
+}
+
+func createEncoderOfMarshaler(ctx *ctx, typ reflect2.Type) ValEncoder {
+ if typ == marshalerType {
+ checkIsEmpty := createCheckIsEmpty(ctx, typ)
+ var encoder ValEncoder = &directMarshalerEncoder{
+ checkIsEmpty: checkIsEmpty,
+ }
+ return encoder
+ }
+ if typ.Implements(marshalerType) {
+ checkIsEmpty := createCheckIsEmpty(ctx, typ)
+ var encoder ValEncoder = &marshalerEncoder{
+ valType: typ,
+ checkIsEmpty: checkIsEmpty,
+ }
+ return encoder
+ }
+ ptrType := reflect2.PtrTo(typ)
+ if ctx.prefix != "" && ptrType.Implements(marshalerType) {
+ checkIsEmpty := createCheckIsEmpty(ctx, ptrType)
+ var encoder ValEncoder = &marshalerEncoder{
+ valType: ptrType,
+ checkIsEmpty: checkIsEmpty,
+ }
+ return &referenceEncoder{encoder}
+ }
+ if typ == textMarshalerType {
+ checkIsEmpty := createCheckIsEmpty(ctx, typ)
+ var encoder ValEncoder = &directTextMarshalerEncoder{
+ checkIsEmpty: checkIsEmpty,
+ stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
+ }
+ return encoder
+ }
+ if typ.Implements(textMarshalerType) {
+ checkIsEmpty := createCheckIsEmpty(ctx, typ)
+ var encoder ValEncoder = &textMarshalerEncoder{
+ valType: typ,
+ stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
+ checkIsEmpty: checkIsEmpty,
+ }
+ return encoder
+ }
+ // if prefix is empty, the type is the root type
+ if ctx.prefix != "" && ptrType.Implements(textMarshalerType) {
+ checkIsEmpty := createCheckIsEmpty(ctx, ptrType)
+ var encoder ValEncoder = &textMarshalerEncoder{
+ valType: ptrType,
+ stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
+ checkIsEmpty: checkIsEmpty,
+ }
+ return &referenceEncoder{encoder}
+ }
+ return nil
+}
+
+type marshalerEncoder struct {
+ checkIsEmpty checkIsEmpty
+ valType reflect2.Type
+}
+
+func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ obj := encoder.valType.UnsafeIndirect(ptr)
+ if encoder.valType.IsNullable() && reflect2.IsNil(obj) {
+ stream.WriteNil()
+ return
+ }
+ marshaler := obj.(json.Marshaler)
+ bytes, err := marshaler.MarshalJSON()
+ if err != nil {
+ stream.Error = err
+ } else {
+ stream.Write(bytes)
+ }
+}
+
+func (encoder *marshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.checkIsEmpty.IsEmpty(ptr)
+}
+
+type directMarshalerEncoder struct {
+ checkIsEmpty checkIsEmpty
+}
+
+func (encoder *directMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ marshaler := *(*json.Marshaler)(ptr)
+ if marshaler == nil {
+ stream.WriteNil()
+ return
+ }
+ bytes, err := marshaler.MarshalJSON()
+ if err != nil {
+ stream.Error = err
+ } else {
+ stream.Write(bytes)
+ }
+}
+
+func (encoder *directMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.checkIsEmpty.IsEmpty(ptr)
+}
+
+type textMarshalerEncoder struct {
+ valType reflect2.Type
+ stringEncoder ValEncoder
+ checkIsEmpty checkIsEmpty
+}
+
+func (encoder *textMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ obj := encoder.valType.UnsafeIndirect(ptr)
+ if encoder.valType.IsNullable() && reflect2.IsNil(obj) {
+ stream.WriteNil()
+ return
+ }
+ marshaler := obj.(encoding.TextMarshaler)
+ bytes, err := marshaler.MarshalText()
+ if err != nil {
+ stream.Error = err
+ } else {
+ str := string(bytes)
+ encoder.stringEncoder.Encode(unsafe.Pointer(&str), stream)
+ }
+}
+
+func (encoder *textMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.checkIsEmpty.IsEmpty(ptr)
+}
+
+type directTextMarshalerEncoder struct {
+ stringEncoder ValEncoder
+ checkIsEmpty checkIsEmpty
+}
+
+func (encoder *directTextMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ marshaler := *(*encoding.TextMarshaler)(ptr)
+ if marshaler == nil {
+ stream.WriteNil()
+ return
+ }
+ bytes, err := marshaler.MarshalText()
+ if err != nil {
+ stream.Error = err
+ } else {
+ str := string(bytes)
+ encoder.stringEncoder.Encode(unsafe.Pointer(&str), stream)
+ }
+}
+
+func (encoder *directTextMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.checkIsEmpty.IsEmpty(ptr)
+}
+
+type unmarshalerDecoder struct {
+ valType reflect2.Type
+}
+
+func (decoder *unmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ valType := decoder.valType
+ obj := valType.UnsafeIndirect(ptr)
+ unmarshaler := obj.(json.Unmarshaler)
+ iter.nextToken()  // skip leading whitespace
+ iter.unreadByte() // put back the first non-space byte
+ bytes := iter.SkipAndReturnBytes()
+ err := unmarshaler.UnmarshalJSON(bytes)
+ if err != nil {
+ iter.ReportError("unmarshalerDecoder", err.Error())
+ }
+}
+
+type textUnmarshalerDecoder struct {
+ valType reflect2.Type
+}
+
+func (decoder *textUnmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ valType := decoder.valType
+ obj := valType.UnsafeIndirect(ptr)
+ if reflect2.IsNil(obj) {
+ ptrType := valType.(*reflect2.UnsafePtrType)
+ elemType := ptrType.Elem()
+ elem := elemType.UnsafeNew()
+ ptrType.UnsafeSet(ptr, unsafe.Pointer(&elem))
+ obj = valType.UnsafeIndirect(ptr)
+ }
+ unmarshaler := (obj).(encoding.TextUnmarshaler)
+ str := iter.ReadString()
+ err := unmarshaler.UnmarshalText([]byte(str))
+ if err != nil {
+ iter.ReportError("textUnmarshalerDecoder", err.Error())
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/reflect_native.go b/vendor/github.com/json-iterator/go/reflect_native.go
new file mode 100644
index 000000000..9042eb0cb
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_native.go
@@ -0,0 +1,451 @@
+package jsoniter
+
+import (
+ "encoding/base64"
+ "reflect"
+ "strconv"
+ "unsafe"
+
+ "github.com/modern-go/reflect2"
+)
+
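+// ptrSize is the pointer width in bits: 32 on 32-bit platforms, 64 on 64-bit.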
+const ptrSize = 32 << uintptr(^uintptr(0)>>63)
+
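+// createEncoderOfNative returns codecs for builtin kinds: []byte is handled as
+// base64, and named types are encoded like their underlying builtin type.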
+func createEncoderOfNative(ctx *ctx, typ reflect2.Type) ValEncoder {
+ if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 {
+ sliceDecoder := decoderOfSlice(ctx, typ)
+ return &base64Codec{sliceDecoder: sliceDecoder}
+ }
+ typeName := typ.String()
+ kind := typ.Kind()
+ switch kind {
+ case reflect.String:
+ if typeName != "string" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*string)(nil)).Elem())
+ }
+ return &stringCodec{}
+ case reflect.Int:
+ if typeName != "int" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem())
+ }
+ if strconv.IntSize == 32 {
+ return &int32Codec{}
+ }
+ return &int64Codec{}
+ case reflect.Int8:
+ if typeName != "int8" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem())
+ }
+ return &int8Codec{}
+ case reflect.Int16:
+ if typeName != "int16" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*int16)(nil)).Elem())
+ }
+ return &int16Codec{}
+ case reflect.Int32:
+ if typeName != "int32" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*int32)(nil)).Elem())
+ }
+ return &int32Codec{}
+ case reflect.Int64:
+ if typeName != "int64" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*int64)(nil)).Elem())
+ }
+ return &int64Codec{}
+ case reflect.Uint:
+ if typeName != "uint" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem())
+ }
+ if strconv.IntSize == 32 {
+ return &uint32Codec{}
+ }
+ return &uint64Codec{}
+ case reflect.Uint8:
+ if typeName != "uint8" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem())
+ }
+ return &uint8Codec{}
+ case reflect.Uint16:
+ if typeName != "uint16" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uint16)(nil)).Elem())
+ }
+ return &uint16Codec{}
+ case reflect.Uint32:
+ if typeName != "uint32" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uint32)(nil)).Elem())
+ }
+ return &uint32Codec{}
+ case reflect.Uintptr:
+ if typeName != "uintptr" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem())
+ }
+ if ptrSize == 32 {
+ return &uint32Codec{}
+ }
+ return &uint64Codec{}
+ case reflect.Uint64:
+ if typeName != "uint64" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem())
+ }
+ return &uint64Codec{}
+ case reflect.Float32:
+ if typeName != "float32" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*float32)(nil)).Elem())
+ }
+ return &float32Codec{}
+ case reflect.Float64:
+ if typeName != "float64" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*float64)(nil)).Elem())
+ }
+ return &float64Codec{}
+ case reflect.Bool:
+ if typeName != "bool" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*bool)(nil)).Elem())
+ }
+ return &boolCodec{}
+ }
+ return nil
+}
+
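+// createDecoderOfNative mirrors createEncoderOfNative for decoding.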
+func createDecoderOfNative(ctx *ctx, typ reflect2.Type) ValDecoder {
+ if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 {
+ sliceDecoder := decoderOfSlice(ctx, typ)
+ return &base64Codec{sliceDecoder: sliceDecoder}
+ }
+ typeName := typ.String()
+ switch typ.Kind() {
+ case reflect.String:
+ if typeName != "string" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*string)(nil)).Elem())
+ }
+ return &stringCodec{}
+ case reflect.Int:
+ if typeName != "int" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem())
+ }
+ if strconv.IntSize == 32 {
+ return &int32Codec{}
+ }
+ return &int64Codec{}
+ case reflect.Int8:
+ if typeName != "int8" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem())
+ }
+ return &int8Codec{}
+ case reflect.Int16:
+ if typeName != "int16" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*int16)(nil)).Elem())
+ }
+ return &int16Codec{}
+ case reflect.Int32:
+ if typeName != "int32" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*int32)(nil)).Elem())
+ }
+ return &int32Codec{}
+ case reflect.Int64:
+ if typeName != "int64" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*int64)(nil)).Elem())
+ }
+ return &int64Codec{}
+ case reflect.Uint:
+ if typeName != "uint" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem())
+ }
+ if strconv.IntSize == 32 {
+ return &uint32Codec{}
+ }
+ return &uint64Codec{}
+ case reflect.Uint8:
+ if typeName != "uint8" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem())
+ }
+ return &uint8Codec{}
+ case reflect.Uint16:
+ if typeName != "uint16" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uint16)(nil)).Elem())
+ }
+ return &uint16Codec{}
+ case reflect.Uint32:
+ if typeName != "uint32" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uint32)(nil)).Elem())
+ }
+ return &uint32Codec{}
+ case reflect.Uintptr:
+ if typeName != "uintptr" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem())
+ }
+ if ptrSize == 32 {
+ return &uint32Codec{}
+ }
+ return &uint64Codec{}
+ case reflect.Uint64:
+ if typeName != "uint64" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem())
+ }
+ return &uint64Codec{}
+ case reflect.Float32:
+ if typeName != "float32" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*float32)(nil)).Elem())
+ }
+ return &float32Codec{}
+ case reflect.Float64:
+ if typeName != "float64" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*float64)(nil)).Elem())
+ }
+ return &float64Codec{}
+ case reflect.Bool:
+ if typeName != "bool" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*bool)(nil)).Elem())
+ }
+ return &boolCodec{}
+ }
+ return nil
+}
+
+type stringCodec struct {
+}
+
+func (codec *stringCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ *((*string)(ptr)) = iter.ReadString()
+}
+
+func (codec *stringCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ str := *((*string)(ptr))
+ stream.WriteString(str)
+}
+
+func (codec *stringCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*string)(ptr)) == ""
+}
+
+type int8Codec struct {
+}
+
+func (codec *int8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*int8)(ptr)) = iter.ReadInt8()
+ }
+}
+
+func (codec *int8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteInt8(*((*int8)(ptr)))
+}
+
+func (codec *int8Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*int8)(ptr)) == 0
+}
+
+type int16Codec struct {
+}
+
+func (codec *int16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*int16)(ptr)) = iter.ReadInt16()
+ }
+}
+
+func (codec *int16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteInt16(*((*int16)(ptr)))
+}
+
+func (codec *int16Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*int16)(ptr)) == 0
+}
+
+type int32Codec struct {
+}
+
+func (codec *int32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*int32)(ptr)) = iter.ReadInt32()
+ }
+}
+
+func (codec *int32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteInt32(*((*int32)(ptr)))
+}
+
+func (codec *int32Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*int32)(ptr)) == 0
+}
+
+type int64Codec struct {
+}
+
+func (codec *int64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*int64)(ptr)) = iter.ReadInt64()
+ }
+}
+
+func (codec *int64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteInt64(*((*int64)(ptr)))
+}
+
+func (codec *int64Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*int64)(ptr)) == 0
+}
+
+type uint8Codec struct {
+}
+
+func (codec *uint8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uint8)(ptr)) = iter.ReadUint8()
+ }
+}
+
+func (codec *uint8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint8(*((*uint8)(ptr)))
+}
+
+func (codec *uint8Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uint8)(ptr)) == 0
+}
+
+type uint16Codec struct {
+}
+
+func (codec *uint16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uint16)(ptr)) = iter.ReadUint16()
+ }
+}
+
+func (codec *uint16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint16(*((*uint16)(ptr)))
+}
+
+func (codec *uint16Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uint16)(ptr)) == 0
+}
+
+type uint32Codec struct {
+}
+
+func (codec *uint32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uint32)(ptr)) = iter.ReadUint32()
+ }
+}
+
+func (codec *uint32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint32(*((*uint32)(ptr)))
+}
+
+func (codec *uint32Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uint32)(ptr)) == 0
+}
+
+type uint64Codec struct {
+}
+
+func (codec *uint64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uint64)(ptr)) = iter.ReadUint64()
+ }
+}
+
+func (codec *uint64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint64(*((*uint64)(ptr)))
+}
+
+func (codec *uint64Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uint64)(ptr)) == 0
+}
+
+type float32Codec struct {
+}
+
+func (codec *float32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*float32)(ptr)) = iter.ReadFloat32()
+ }
+}
+
+func (codec *float32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteFloat32(*((*float32)(ptr)))
+}
+
+func (codec *float32Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*float32)(ptr)) == 0
+}
+
+type float64Codec struct {
+}
+
+func (codec *float64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*float64)(ptr)) = iter.ReadFloat64()
+ }
+}
+
+func (codec *float64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteFloat64(*((*float64)(ptr)))
+}
+
+func (codec *float64Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*float64)(ptr)) == 0
+}
+
+type boolCodec struct {
+}
+
+func (codec *boolCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*bool)(ptr)) = iter.ReadBool()
+ }
+}
+
+func (codec *boolCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteBool(*((*bool)(ptr)))
+}
+
+func (codec *boolCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return !(*((*bool)(ptr)))
+}
+
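+// base64Codec encodes []byte as a base64 string and decodes either a base64
+// string or a plain JSON array of numbers.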
+type base64Codec struct {
+ sliceType *reflect2.UnsafeSliceType
+ sliceDecoder ValDecoder
+}
+
+func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.ReadNil() {
+ codec.sliceType.UnsafeSetNil(ptr)
+ return
+ }
+ switch iter.WhatIsNext() {
+ case StringValue:
+ src := iter.ReadString()
+ dst, err := base64.StdEncoding.DecodeString(src)
+ if err != nil {
+ iter.ReportError("decode base64", err.Error())
+ } else {
+ codec.sliceType.UnsafeSet(ptr, unsafe.Pointer(&dst))
+ }
+ case ArrayValue:
+ codec.sliceDecoder.Decode(ptr, iter)
+ default:
+ iter.ReportError("base64Codec", "invalid input")
+ }
+}
+
+func (codec *base64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ src := *((*[]byte)(ptr))
+ if len(src) == 0 {
+ stream.WriteNil()
+ return
+ }
+ encoding := base64.StdEncoding
+ stream.writeByte('"')
+ size := encoding.EncodedLen(len(src))
+ buf := make([]byte, size)
+ encoding.Encode(buf, src)
+ stream.buf = append(stream.buf, buf...)
+ stream.writeByte('"')
+}
+
+func (codec *base64Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*[]byte)(ptr))) == 0
+}
diff --git a/vendor/github.com/json-iterator/go/reflect_optional.go b/vendor/github.com/json-iterator/go/reflect_optional.go
new file mode 100644
index 000000000..43ec71d6d
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_optional.go
@@ -0,0 +1,133 @@
+package jsoniter
+
+import (
+ "github.com/modern-go/reflect2"
+ "reflect"
+ "unsafe"
+)
+
+func decoderOfOptional(ctx *ctx, typ reflect2.Type) ValDecoder {
+ ptrType := typ.(*reflect2.UnsafePtrType)
+ elemType := ptrType.Elem()
+ decoder := decoderOfType(ctx, elemType)
+ if ctx.prefix == "" && elemType.Kind() == reflect.Ptr {
+ return &dereferenceDecoder{elemType, decoder}
+ }
+ return &OptionalDecoder{elemType, decoder}
+}
+
+func encoderOfOptional(ctx *ctx, typ reflect2.Type) ValEncoder {
+ ptrType := typ.(*reflect2.UnsafePtrType)
+ elemType := ptrType.Elem()
+ elemEncoder := encoderOfType(ctx, elemType)
+ encoder := &OptionalEncoder{elemEncoder}
+ return encoder
+}
+
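+// OptionalDecoder decodes into a pointer: null sets it to nil; otherwise the
+// pointed-to value is allocated if needed and decoded in place.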
+type OptionalDecoder struct {
+ ValueType reflect2.Type
+ ValueDecoder ValDecoder
+}
+
+func (decoder *OptionalDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.ReadNil() {
+ *((*unsafe.Pointer)(ptr)) = nil
+ } else {
+ if *((*unsafe.Pointer)(ptr)) == nil {
+ // pointer is nil, so allocate memory to hold the value
+ newPtr := decoder.ValueType.UnsafeNew()
+ decoder.ValueDecoder.Decode(newPtr, iter)
+ *((*unsafe.Pointer)(ptr)) = newPtr
+ } else {
+ // reuse the existing instance
+ decoder.ValueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
+ }
+ }
+}
+
+type dereferenceDecoder struct {
+ // used only to dereference a pointer
+ valueType reflect2.Type
+ valueDecoder ValDecoder
+}
+
+func (decoder *dereferenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if *((*unsafe.Pointer)(ptr)) == nil {
+ // pointer is nil, so allocate memory to hold the value
+ newPtr := decoder.valueType.UnsafeNew()
+ decoder.valueDecoder.Decode(newPtr, iter)
+ *((*unsafe.Pointer)(ptr)) = newPtr
+ } else {
+ // reuse the existing instance
+ decoder.valueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
+ }
+}
+
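+// OptionalEncoder encodes a pointer, writing null when it is nil.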
+type OptionalEncoder struct {
+ ValueEncoder ValEncoder
+}
+
+func (encoder *OptionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if *((*unsafe.Pointer)(ptr)) == nil {
+ stream.WriteNil()
+ } else {
+ encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
+ }
+}
+
+func (encoder *OptionalEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*unsafe.Pointer)(ptr)) == nil
+}
+
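+// dereferenceEncoder encodes the value behind a pointer, writing null when the
+// pointer is nil.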
+type dereferenceEncoder struct {
+ ValueEncoder ValEncoder
+}
+
+func (encoder *dereferenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if *((*unsafe.Pointer)(ptr)) == nil {
+ stream.WriteNil()
+ } else {
+ encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
+ }
+}
+
+func (encoder *dereferenceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ dePtr := *((*unsafe.Pointer)(ptr))
+ if dePtr == nil {
+ return true
+ }
+ return encoder.ValueEncoder.IsEmpty(dePtr)
+}
+
+func (encoder *dereferenceEncoder) IsEmbeddedPtrNil(ptr unsafe.Pointer) bool {
+ deReferenced := *((*unsafe.Pointer)(ptr))
+ if deReferenced == nil {
+ return true
+ }
+ isEmbeddedPtrNil, converted := encoder.ValueEncoder.(IsEmbeddedPtrNil)
+ if !converted {
+ return false
+ }
+ fieldPtr := unsafe.Pointer(deReferenced)
+ return isEmbeddedPtrNil.IsEmbeddedPtrNil(fieldPtr)
+}
+
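+// referenceEncoder passes the address of ptr so the wrapped encoder sees a
+// pointer to the value.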
+type referenceEncoder struct {
+ encoder ValEncoder
+}
+
+func (encoder *referenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ encoder.encoder.Encode(unsafe.Pointer(&ptr), stream)
+}
+
+func (encoder *referenceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.encoder.IsEmpty(unsafe.Pointer(&ptr))
+}
+
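+// referenceDecoder passes the address of ptr so the wrapped decoder sees a
+// pointer to the value.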
+type referenceDecoder struct {
+ decoder ValDecoder
+}
+
+func (decoder *referenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.decoder.Decode(unsafe.Pointer(&ptr), iter)
+}
diff --git a/vendor/github.com/json-iterator/go/reflect_slice.go b/vendor/github.com/json-iterator/go/reflect_slice.go
new file mode 100644
index 000000000..9441d79df
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_slice.go
@@ -0,0 +1,99 @@
+package jsoniter
+
+import (
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "io"
+ "unsafe"
+)
+
+func decoderOfSlice(ctx *ctx, typ reflect2.Type) ValDecoder {
+ sliceType := typ.(*reflect2.UnsafeSliceType)
+ decoder := decoderOfType(ctx.append("[sliceElem]"), sliceType.Elem())
+ return &sliceDecoder{sliceType, decoder}
+}
+
+func encoderOfSlice(ctx *ctx, typ reflect2.Type) ValEncoder {
+ sliceType := typ.(*reflect2.UnsafeSliceType)
+ encoder := encoderOfType(ctx.append("[sliceElem]"), sliceType.Elem())
+ return &sliceEncoder{sliceType, encoder}
+}
+
+type sliceEncoder struct {
+ sliceType *reflect2.UnsafeSliceType
+ elemEncoder ValEncoder
+}
+
+func (encoder *sliceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if encoder.sliceType.UnsafeIsNil(ptr) {
+ stream.WriteNil()
+ return
+ }
+ length := encoder.sliceType.UnsafeLengthOf(ptr)
+ if length == 0 {
+ stream.WriteEmptyArray()
+ return
+ }
+ stream.WriteArrayStart()
+ encoder.elemEncoder.Encode(encoder.sliceType.UnsafeGetIndex(ptr, 0), stream)
+ for i := 1; i < length; i++ {
+ stream.WriteMore()
+ elemPtr := encoder.sliceType.UnsafeGetIndex(ptr, i)
+ encoder.elemEncoder.Encode(elemPtr, stream)
+ }
+ stream.WriteArrayEnd()
+ if stream.Error != nil && stream.Error != io.EOF {
+ stream.Error = fmt.Errorf("%v: %s", encoder.sliceType, stream.Error.Error())
+ }
+}
+
+func (encoder *sliceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.sliceType.UnsafeLengthOf(ptr) == 0
+}
+
+type sliceDecoder struct {
+ sliceType *reflect2.UnsafeSliceType
+ elemDecoder ValDecoder
+}
+
+func (decoder *sliceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.doDecode(ptr, iter)
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.sliceType, iter.Error.Error())
+ }
+}
+
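+// doDecode grows the slice one element at a time, decoding each element in place.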
+func (decoder *sliceDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
+ c := iter.nextToken()
+ sliceType := decoder.sliceType
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ sliceType.UnsafeSetNil(ptr)
+ return
+ }
+ if c != '[' {
+ iter.ReportError("decode slice", "expect [ or n, but found "+string([]byte{c}))
+ return
+ }
+ c = iter.nextToken()
+ if c == ']' {
+ sliceType.UnsafeSet(ptr, sliceType.UnsafeMakeSlice(0, 0))
+ return
+ }
+ iter.unreadByte()
+ sliceType.UnsafeGrow(ptr, 1)
+ elemPtr := sliceType.UnsafeGetIndex(ptr, 0)
+ decoder.elemDecoder.Decode(elemPtr, iter)
+ length := 1
+ for c = iter.nextToken(); c == ','; c = iter.nextToken() {
+ idx := length
+ length++
+ sliceType.UnsafeGrow(ptr, length)
+ elemPtr = sliceType.UnsafeGetIndex(ptr, idx)
+ decoder.elemDecoder.Decode(elemPtr, iter)
+ }
+ if c != ']' {
+ iter.ReportError("decode slice", "expect ], but found "+string([]byte{c}))
+ return
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/reflect_struct_decoder.go b/vendor/github.com/json-iterator/go/reflect_struct_decoder.go
new file mode 100644
index 000000000..355d2d116
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_struct_decoder.go
@@ -0,0 +1,1048 @@
+package jsoniter
+
+import (
+ "fmt"
+ "io"
+ "strings"
+ "unsafe"
+
+ "github.com/modern-go/reflect2"
+)
+
+func decoderOfStruct(ctx *ctx, typ reflect2.Type) ValDecoder {
+ bindings := map[string]*Binding{}
+ structDescriptor := describeStruct(ctx, typ)
+ for _, binding := range structDescriptor.Fields {
+ for _, fromName := range binding.FromNames {
+ old := bindings[fromName]
+ if old == nil {
+ bindings[fromName] = binding
+ continue
+ }
+ ignoreOld, ignoreNew := resolveConflictBinding(ctx.frozenConfig, old, binding)
+ if ignoreOld {
+ delete(bindings, fromName)
+ }
+ if !ignoreNew {
+ bindings[fromName] = binding
+ }
+ }
+ }
+ fields := map[string]*structFieldDecoder{}
+ for k, binding := range bindings {
+ fields[k] = binding.Decoder.(*structFieldDecoder)
+ }
+
+ if !ctx.caseSensitive() {
+ for k, binding := range bindings {
+ if _, found := fields[strings.ToLower(k)]; !found {
+ fields[strings.ToLower(k)] = binding.Decoder.(*structFieldDecoder)
+ }
+ }
+ }
+
+ return createStructDecoder(ctx, typ, fields)
+}
+
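+// createStructDecoder returns a decoder specialized for structs with up to ten
+// fields, dispatching on precomputed field-name hashes; hash value 0 is
+// reserved, and any hash collision falls back to the general decoder.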
+func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structFieldDecoder) ValDecoder {
+ if ctx.disallowUnknownFields {
+ return &generalStructDecoder{typ: typ, fields: fields, disallowUnknownFields: true}
+ }
+ knownHash := map[int64]struct{}{
+ 0: {},
+ }
+
+ switch len(fields) {
+ case 0:
+ return &skipObjectDecoder{typ}
+ case 1:
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ return &oneFieldStructDecoder{typ, fieldHash, fieldDecoder}
+ }
+ case 2:
+ var fieldHash1 int64
+ var fieldHash2 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldHash1 == 0 {
+ fieldHash1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else {
+ fieldHash2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ }
+ }
+ return &twoFieldsStructDecoder{typ, fieldHash1, fieldDecoder1, fieldHash2, fieldDecoder2}
+ case 3:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ }
+ }
+ return &threeFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3}
+ case 4:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ }
+ }
+ return &fourFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4}
+ case 5:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ }
+ }
+ return &fiveFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5}
+ case 6:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldName6 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ }
+ }
+ return &sixFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5,
+ fieldName6, fieldDecoder6}
+ case 7:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldName6 int64
+ var fieldName7 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ var fieldDecoder7 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else if fieldName6 == 0 {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ } else {
+ fieldName7 = fieldHash
+ fieldDecoder7 = fieldDecoder
+ }
+ }
+ return &sevenFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5,
+ fieldName6, fieldDecoder6,
+ fieldName7, fieldDecoder7}
+ case 8:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldName6 int64
+ var fieldName7 int64
+ var fieldName8 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ var fieldDecoder7 *structFieldDecoder
+ var fieldDecoder8 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else if fieldName6 == 0 {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ } else if fieldName7 == 0 {
+ fieldName7 = fieldHash
+ fieldDecoder7 = fieldDecoder
+ } else {
+ fieldName8 = fieldHash
+ fieldDecoder8 = fieldDecoder
+ }
+ }
+ return &eightFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5,
+ fieldName6, fieldDecoder6,
+ fieldName7, fieldDecoder7,
+ fieldName8, fieldDecoder8}
+ case 9:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldName6 int64
+ var fieldName7 int64
+ var fieldName8 int64
+ var fieldName9 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ var fieldDecoder7 *structFieldDecoder
+ var fieldDecoder8 *structFieldDecoder
+ var fieldDecoder9 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else if fieldName6 == 0 {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ } else if fieldName7 == 0 {
+ fieldName7 = fieldHash
+ fieldDecoder7 = fieldDecoder
+ } else if fieldName8 == 0 {
+ fieldName8 = fieldHash
+ fieldDecoder8 = fieldDecoder
+ } else {
+ fieldName9 = fieldHash
+ fieldDecoder9 = fieldDecoder
+ }
+ }
+ return &nineFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5,
+ fieldName6, fieldDecoder6,
+ fieldName7, fieldDecoder7,
+ fieldName8, fieldDecoder8,
+ fieldName9, fieldDecoder9}
+ case 10:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldName6 int64
+ var fieldName7 int64
+ var fieldName8 int64
+ var fieldName9 int64
+ var fieldName10 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ var fieldDecoder7 *structFieldDecoder
+ var fieldDecoder8 *structFieldDecoder
+ var fieldDecoder9 *structFieldDecoder
+ var fieldDecoder10 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else if fieldName6 == 0 {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ } else if fieldName7 == 0 {
+ fieldName7 = fieldHash
+ fieldDecoder7 = fieldDecoder
+ } else if fieldName8 == 0 {
+ fieldName8 = fieldHash
+ fieldDecoder8 = fieldDecoder
+ } else if fieldName9 == 0 {
+ fieldName9 = fieldHash
+ fieldDecoder9 = fieldDecoder
+ } else {
+ fieldName10 = fieldHash
+ fieldDecoder10 = fieldDecoder
+ }
+ }
+ return &tenFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5,
+ fieldName6, fieldDecoder6,
+ fieldName7, fieldDecoder7,
+ fieldName8, fieldDecoder8,
+ fieldName9, fieldDecoder9,
+ fieldName10, fieldDecoder10}
+ }
+ return &generalStructDecoder{typ, fields, false}
+}
+
+type generalStructDecoder struct {
+ typ reflect2.Type
+ fields map[string]*structFieldDecoder
+ disallowUnknownFields bool
+}
+
+func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ var c byte
+ for c = ','; c == ','; c = iter.nextToken() {
+ decoder.decodeOneField(ptr, iter)
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+ if c != '}' {
+ iter.ReportError("struct Decode", `expect }, but found `+string([]byte{c}))
+ }
+}
+
+func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *Iterator) {
+ var field string
+ var fieldDecoder *structFieldDecoder
+ if iter.cfg.objectFieldMustBeSimpleString {
+ fieldBytes := iter.ReadStringAsSlice()
+ field = *(*string)(unsafe.Pointer(&fieldBytes))
+ fieldDecoder = decoder.fields[field]
+ if fieldDecoder == nil && !iter.cfg.caseSensitive {
+ fieldDecoder = decoder.fields[strings.ToLower(field)]
+ }
+ } else {
+ field = iter.ReadString()
+ fieldDecoder = decoder.fields[field]
+ if fieldDecoder == nil && !iter.cfg.caseSensitive {
+ fieldDecoder = decoder.fields[strings.ToLower(field)]
+ }
+ }
+ if fieldDecoder == nil {
+ msg := "found unknown field: " + field
+ if decoder.disallowUnknownFields {
+ iter.ReportError("ReadObject", msg)
+ }
+ c := iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ iter.Skip()
+ return
+ }
+ c := iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ fieldDecoder.Decode(ptr, iter)
+}
+
+type skipObjectDecoder struct {
+ typ reflect2.Type
+}
+
+func (decoder *skipObjectDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ valueType := iter.WhatIsNext()
+ if valueType != ObjectValue && valueType != NilValue {
+ iter.ReportError("skipObjectDecoder", "expect object or null")
+ return
+ }
+ iter.Skip()
+}
+
+type oneFieldStructDecoder struct {
+ typ reflect2.Type
+ fieldHash int64
+ fieldDecoder *structFieldDecoder
+}
+
+func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ if iter.readFieldHash() == decoder.fieldHash {
+ decoder.fieldDecoder.Decode(ptr, iter)
+ } else {
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type twoFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+}
+
+func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type threeFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+}
+
+func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type fourFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+}
+
+func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type fiveFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+}
+
+func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type sixFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int64
+ fieldDecoder6 *structFieldDecoder
+}
+
+func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type sevenFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int64
+ fieldDecoder6 *structFieldDecoder
+ fieldHash7 int64
+ fieldDecoder7 *structFieldDecoder
+}
+
+func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ case decoder.fieldHash7:
+ decoder.fieldDecoder7.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type eightFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int64
+ fieldDecoder6 *structFieldDecoder
+ fieldHash7 int64
+ fieldDecoder7 *structFieldDecoder
+ fieldHash8 int64
+ fieldDecoder8 *structFieldDecoder
+}
+
+func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ case decoder.fieldHash7:
+ decoder.fieldDecoder7.Decode(ptr, iter)
+ case decoder.fieldHash8:
+ decoder.fieldDecoder8.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type nineFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int64
+ fieldDecoder6 *structFieldDecoder
+ fieldHash7 int64
+ fieldDecoder7 *structFieldDecoder
+ fieldHash8 int64
+ fieldDecoder8 *structFieldDecoder
+ fieldHash9 int64
+ fieldDecoder9 *structFieldDecoder
+}
+
+func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ case decoder.fieldHash7:
+ decoder.fieldDecoder7.Decode(ptr, iter)
+ case decoder.fieldHash8:
+ decoder.fieldDecoder8.Decode(ptr, iter)
+ case decoder.fieldHash9:
+ decoder.fieldDecoder9.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type tenFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int64
+ fieldDecoder6 *structFieldDecoder
+ fieldHash7 int64
+ fieldDecoder7 *structFieldDecoder
+ fieldHash8 int64
+ fieldDecoder8 *structFieldDecoder
+ fieldHash9 int64
+ fieldDecoder9 *structFieldDecoder
+ fieldHash10 int64
+ fieldDecoder10 *structFieldDecoder
+}
+
+func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ case decoder.fieldHash7:
+ decoder.fieldDecoder7.Decode(ptr, iter)
+ case decoder.fieldHash8:
+ decoder.fieldDecoder8.Decode(ptr, iter)
+ case decoder.fieldHash9:
+ decoder.fieldDecoder9.Decode(ptr, iter)
+ case decoder.fieldHash10:
+ decoder.fieldDecoder10.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type structFieldDecoder struct {
+ field reflect2.StructField
+ fieldDecoder ValDecoder
+}
+
+func (decoder *structFieldDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ fieldPtr := decoder.field.UnsafeGet(ptr)
+ decoder.fieldDecoder.Decode(fieldPtr, iter)
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%s: %s", decoder.field.Name(), iter.Error.Error())
+ }
+}
+
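+// stringModeStringDecoder handles string fields tagged with ",string": the
+// value is decoded once, then its contents are parsed again as a JSON string.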
+type stringModeStringDecoder struct {
+ elemDecoder ValDecoder
+ cfg *frozenConfig
+}
+
+func (decoder *stringModeStringDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.elemDecoder.Decode(ptr, iter)
+ str := *((*string)(ptr))
+ tempIter := decoder.cfg.BorrowIterator([]byte(str))
+ defer decoder.cfg.ReturnIterator(tempIter)
+ *((*string)(ptr)) = tempIter.ReadString()
+}
+
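+// stringModeNumberDecoder handles numeric fields tagged with ",string",
+// requiring the number to be wrapped in double quotes.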
+type stringModeNumberDecoder struct {
+ elemDecoder ValDecoder
+}
+
+func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ c := iter.nextToken()
+ if c != '"' {
+ iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
+ return
+ }
+ decoder.elemDecoder.Decode(ptr, iter)
+ if iter.Error != nil {
+ return
+ }
+ c = iter.readByte()
+ if c != '"' {
+ iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
+ return
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/reflect_struct_encoder.go b/vendor/github.com/json-iterator/go/reflect_struct_encoder.go
new file mode 100644
index 000000000..d0759cf64
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_struct_encoder.go
@@ -0,0 +1,210 @@
+package jsoniter
+
+import (
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "io"
+ "reflect"
+ "unsafe"
+)
+
+func encoderOfStruct(ctx *ctx, typ reflect2.Type) ValEncoder {
+ type bindingTo struct {
+ binding *Binding
+ toName string
+ ignored bool
+ }
+ orderedBindings := []*bindingTo{}
+ structDescriptor := describeStruct(ctx, typ)
+ for _, binding := range structDescriptor.Fields {
+ for _, toName := range binding.ToNames {
+ new := &bindingTo{
+ binding: binding,
+ toName: toName,
+ }
+ for _, old := range orderedBindings {
+ if old.toName != toName {
+ continue
+ }
+ old.ignored, new.ignored = resolveConflictBinding(ctx.frozenConfig, old.binding, new.binding)
+ }
+ orderedBindings = append(orderedBindings, new)
+ }
+ }
+ if len(orderedBindings) == 0 {
+ return &emptyStructEncoder{}
+ }
+ finalOrderedFields := []structFieldTo{}
+ for _, bindingTo := range orderedBindings {
+ if !bindingTo.ignored {
+ finalOrderedFields = append(finalOrderedFields, structFieldTo{
+ encoder: bindingTo.binding.Encoder.(*structFieldEncoder),
+ toName: bindingTo.toName,
+ })
+ }
+ }
+ return &structEncoder{typ, finalOrderedFields}
+}
+
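+// createCheckIsEmpty builds a minimal encoder whose only purpose is answering
+// IsEmpty for omitempty checks.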
+func createCheckIsEmpty(ctx *ctx, typ reflect2.Type) checkIsEmpty {
+ encoder := createEncoderOfNative(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ kind := typ.Kind()
+ switch kind {
+ case reflect.Interface:
+ return &dynamicEncoder{typ}
+ case reflect.Struct:
+ return &structEncoder{typ: typ}
+ case reflect.Array:
+ return &arrayEncoder{}
+ case reflect.Slice:
+ return &sliceEncoder{}
+ case reflect.Map:
+ return encoderOfMap(ctx, typ)
+ case reflect.Ptr:
+ return &OptionalEncoder{}
+ default:
+ return &lazyErrorEncoder{err: fmt.Errorf("unsupported type: %v", typ)}
+ }
+}
+
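+// resolveConflictBinding decides which of two bindings sharing a name survive,
+// preferring tagged and less deeply embedded fields; an exact tie drops both.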
+func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ignoreNew bool) {
+ newTagged := new.Field.Tag().Get(cfg.getTagKey()) != ""
+ oldTagged := old.Field.Tag().Get(cfg.getTagKey()) != ""
+ if newTagged {
+ if oldTagged {
+ if len(old.levels) > len(new.levels) {
+ return true, false
+ } else if len(new.levels) > len(old.levels) {
+ return false, true
+ } else {
+ return true, true
+ }
+ } else {
+ return true, false
+ }
+ } else {
+ if oldTagged {
+ return true, false
+ }
+ if len(old.levels) > len(new.levels) {
+ return true, false
+ } else if len(new.levels) > len(old.levels) {
+ return false, true
+ } else {
+ return true, true
+ }
+ }
+}
+
+type structFieldEncoder struct {
+ field reflect2.StructField
+ fieldEncoder ValEncoder
+ omitempty bool
+}
+
+func (encoder *structFieldEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ fieldPtr := encoder.field.UnsafeGet(ptr)
+ encoder.fieldEncoder.Encode(fieldPtr, stream)
+ if stream.Error != nil && stream.Error != io.EOF {
+ stream.Error = fmt.Errorf("%s: %s", encoder.field.Name(), stream.Error.Error())
+ }
+}
+
+func (encoder *structFieldEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ fieldPtr := encoder.field.UnsafeGet(ptr)
+ return encoder.fieldEncoder.IsEmpty(fieldPtr)
+}
+
+func (encoder *structFieldEncoder) IsEmbeddedPtrNil(ptr unsafe.Pointer) bool {
+ isEmbeddedPtrNil, converted := encoder.fieldEncoder.(IsEmbeddedPtrNil)
+ if !converted {
+ return false
+ }
+ fieldPtr := encoder.field.UnsafeGet(ptr)
+ return isEmbeddedPtrNil.IsEmbeddedPtrNil(fieldPtr)
+}
+
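+// IsEmbeddedPtrNil is implemented by encoders that can report whether an
+// embedded pointer is nil, letting struct encoding skip such fields.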
+type IsEmbeddedPtrNil interface {
+ IsEmbeddedPtrNil(ptr unsafe.Pointer) bool
+}
+
+type structEncoder struct {
+ typ reflect2.Type
+ fields []structFieldTo
+}
+
+type structFieldTo struct {
+ encoder *structFieldEncoder
+ toName string
+}
+
+func (encoder *structEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteObjectStart()
+ isNotFirst := false
+ for _, field := range encoder.fields {
+ if field.encoder.omitempty && field.encoder.IsEmpty(ptr) {
+ continue
+ }
+ if field.encoder.IsEmbeddedPtrNil(ptr) {
+ continue
+ }
+ if isNotFirst {
+ stream.WriteMore()
+ }
+ stream.WriteObjectField(field.toName)
+ field.encoder.Encode(ptr, stream)
+ isNotFirst = true
+ }
+ stream.WriteObjectEnd()
+ if stream.Error != nil && stream.Error != io.EOF {
+ stream.Error = fmt.Errorf("%v.%s", encoder.typ, stream.Error.Error())
+ }
+}
+
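+// IsEmpty always reports false: struct values are never omitted by omitempty.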
+func (encoder *structEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
+
+type emptyStructEncoder struct {
+}
+
+func (encoder *emptyStructEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteEmptyObject()
+}
+
+func (encoder *emptyStructEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
+
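+// stringModeNumberEncoder writes the number wrapped in double quotes (",string" tag).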
+type stringModeNumberEncoder struct {
+ elemEncoder ValEncoder
+}
+
+func (encoder *stringModeNumberEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.writeByte('"')
+ encoder.elemEncoder.Encode(ptr, stream)
+ stream.writeByte('"')
+}
+
+func (encoder *stringModeNumberEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.elemEncoder.IsEmpty(ptr)
+}
+
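+// stringModeStringEncoder encodes the value into a temporary stream, then
+// writes the result as a single quoted JSON string (",string" tag).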
+type stringModeStringEncoder struct {
+ elemEncoder ValEncoder
+ cfg *frozenConfig
+}
+
+func (encoder *stringModeStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ tempStream := encoder.cfg.BorrowStream(nil)
+ defer encoder.cfg.ReturnStream(tempStream)
+ encoder.elemEncoder.Encode(ptr, tempStream)
+ stream.WriteString(string(tempStream.Buffer()))
+}
+
+func (encoder *stringModeStringEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.elemEncoder.IsEmpty(ptr)
+}
diff --git a/vendor/github.com/json-iterator/go/stream.go b/vendor/github.com/json-iterator/go/stream.go
new file mode 100644
index 000000000..17662fded
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/stream.go
@@ -0,0 +1,211 @@
+package jsoniter
+
+import (
+ "io"
+)
+
+// Stream is an io.Writer-like object with JSON-specific write functions.
+// Errors are not returned as values; they are stored in the Error field of the stream instance.
+type Stream struct {
+ cfg *frozenConfig
+ out io.Writer
+ buf []byte
+ Error error
+ indention int
+ Attachment interface{} // open slot for customized encoders to pass data through
+}
+
+// NewStream creates a new Stream instance.
+// cfg can be jsoniter.ConfigDefault.
+// out can be nil to write only to the internal buffer.
+// bufSize is the initial size of the internal buffer in bytes.
+func NewStream(cfg API, out io.Writer, bufSize int) *Stream {
+ return &Stream{
+ cfg: cfg.(*frozenConfig),
+ out: out,
+ buf: make([]byte, 0, bufSize),
+ Error: nil,
+ indention: 0,
+ }
+}
+
+// Pool returns a pool that can provide more streams with the same configuration.
+func (stream *Stream) Pool() StreamPool {
+ return stream.cfg
+}
+
+// Reset reuses this stream instance by assigning a new writer.
+func (stream *Stream) Reset(out io.Writer) {
+ stream.out = out
+ stream.buf = stream.buf[:0]
+}
+
+// Available returns how many bytes are unused in the buffer.
+func (stream *Stream) Available() int {
+ return cap(stream.buf) - len(stream.buf)
+}
+
+// Buffered returns the number of bytes that have been written into the current buffer.
+func (stream *Stream) Buffered() int {
+ return len(stream.buf)
+}
+
+// Buffer returns the written bytes; if the writer is nil, use this method to take the result.
+func (stream *Stream) Buffer() []byte {
+ return stream.buf
+}
+
+// SetBuffer allows appending to the internal buffer directly.
+func (stream *Stream) SetBuffer(buf []byte) {
+ stream.buf = buf
+}
+
+// Write writes the contents of p into the buffer.
+// When no writer is attached, p is buffered and len(p) is returned.
+// When a writer is attached, the entire buffered content is forwarded
+// to it, and nn reflects the bytes accepted by the underlying writer.
+func (stream *Stream) Write(p []byte) (nn int, err error) {
+ stream.buf = append(stream.buf, p...)
+ if stream.out != nil {
+ nn, err = stream.out.Write(stream.buf)
+ stream.buf = stream.buf[nn:]
+ return
+ }
+ return len(p), nil
+}
+
+// writeByte appends a single byte to the buffer.
+func (stream *Stream) writeByte(c byte) {
+ stream.buf = append(stream.buf, c)
+}
+
+func (stream *Stream) writeTwoBytes(c1 byte, c2 byte) {
+ stream.buf = append(stream.buf, c1, c2)
+}
+
+func (stream *Stream) writeThreeBytes(c1 byte, c2 byte, c3 byte) {
+ stream.buf = append(stream.buf, c1, c2, c3)
+}
+
+func (stream *Stream) writeFourBytes(c1 byte, c2 byte, c3 byte, c4 byte) {
+ stream.buf = append(stream.buf, c1, c2, c3, c4)
+}
+
+func (stream *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) {
+ stream.buf = append(stream.buf, c1, c2, c3, c4, c5)
+}
+
+// Flush writes any buffered data to the underlying io.Writer.
+func (stream *Stream) Flush() error {
+ if stream.out == nil {
+ return nil
+ }
+ if stream.Error != nil {
+ return stream.Error
+ }
+ n, err := stream.out.Write(stream.buf)
+ if err != nil {
+ if stream.Error == nil {
+ stream.Error = err
+ }
+ return err
+ }
+ stream.buf = stream.buf[n:]
+ return nil
+}
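+
+// A sketch of the error latching (illustrative only; failingWriter is a
+// hypothetical io.Writer whose Write always fails):
+//
+//	s := NewStream(ConfigDefault, failingWriter{}, 8)
+//	s.WriteTrue()
+//	err := s.Flush() // non-nil, and latched in s.Error
+//	_ = s.Flush()    // returns the latched error without writing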
+
+// WriteRaw writes the string out without quotes, just like raw bytes.
+func (stream *Stream) WriteRaw(s string) {
+ stream.buf = append(stream.buf, s...)
+}
+
+// WriteNil writes null to the stream.
+func (stream *Stream) WriteNil() {
+ stream.writeFourBytes('n', 'u', 'l', 'l')
+}
+
+// WriteTrue writes true to the stream.
+func (stream *Stream) WriteTrue() {
+ stream.writeFourBytes('t', 'r', 'u', 'e')
+}
+
+// WriteFalse writes false to the stream.
+func (stream *Stream) WriteFalse() {
+ stream.writeFiveBytes('f', 'a', 'l', 's', 'e')
+}
+
+// WriteBool writes true or false to the stream.
+func (stream *Stream) WriteBool(val bool) {
+ if val {
+ stream.WriteTrue()
+ } else {
+ stream.WriteFalse()
+ }
+}
+
+// WriteObjectStart writes { with possible indention.
+func (stream *Stream) WriteObjectStart() {
+ stream.indention += stream.cfg.indentionStep
+ stream.writeByte('{')
+ stream.writeIndention(0)
+}
+
+// WriteObjectField writes "field": with possible indention.
+func (stream *Stream) WriteObjectField(field string) {
+ stream.WriteString(field)
+ if stream.indention > 0 {
+ stream.writeTwoBytes(':', ' ')
+ } else {
+ stream.writeByte(':')
+ }
+}
+
+// WriteObjectEnd writes } with possible indention.
+func (stream *Stream) WriteObjectEnd() {
+ stream.writeIndention(stream.cfg.indentionStep)
+ stream.indention -= stream.cfg.indentionStep
+ stream.writeByte('}')
+}
+
+// WriteEmptyObject writes {}.
+func (stream *Stream) WriteEmptyObject() {
+ stream.writeByte('{')
+ stream.writeByte('}')
+}
+
+// WriteMore writes , with possible indention.
+func (stream *Stream) WriteMore() {
+ stream.writeByte(',')
+ stream.writeIndention(0)
+ stream.Flush()
+}
+
+// WriteArrayStart writes [ with possible indention.
+func (stream *Stream) WriteArrayStart() {
+ stream.indention += stream.cfg.indentionStep
+ stream.writeByte('[')
+ stream.writeIndention(0)
+}
+
+// WriteEmptyArray writes [].
+func (stream *Stream) WriteEmptyArray() {
+ stream.writeTwoBytes('[', ']')
+}
+
+// WriteArrayEnd writes ] with possible indention.
+func (stream *Stream) WriteArrayEnd() {
+ stream.writeIndention(stream.cfg.indentionStep)
+ stream.indention -= stream.cfg.indentionStep
+ stream.writeByte(']')
+}
+
+func (stream *Stream) writeIndention(delta int) {
+ if stream.indention == 0 {
+ return
+ }
+ stream.writeByte('\n')
+ toWrite := stream.indention - delta
+ for i := 0; i < toWrite; i++ {
+ stream.buf = append(stream.buf, ' ')
+ }
+}
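+
+// A pretty-printing sketch (illustrative only): with a non-zero
+// IndentionStep the start/more/end helpers emit newlines and spaces.
+//
+//	cfg := Config{IndentionStep: 2}.Froze()
+//	s := cfg.BorrowStream(nil)
+//	defer cfg.ReturnStream(s)
+//	s.WriteArrayStart()
+//	s.WriteInt(1)
+//	s.WriteMore()
+//	s.WriteInt(2)
+//	s.WriteArrayEnd()
+//	// string(s.Buffer()) == "[\n  1,\n  2\n]"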
diff --git a/vendor/github.com/json-iterator/go/stream_float.go b/vendor/github.com/json-iterator/go/stream_float.go
new file mode 100644
index 000000000..f318d2c59
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/stream_float.go
@@ -0,0 +1,94 @@
+package jsoniter
+
+import (
+ "math"
+ "strconv"
+)
+
+var pow10 []uint64
+
+func init() {
+ pow10 = []uint64{1, 10, 100, 1000, 10000, 100000, 1000000}
+}
+
+// WriteFloat32 writes a float32 to the stream.
+func (stream *Stream) WriteFloat32(val float32) {
+ abs := math.Abs(float64(val))
+ fmt := byte('f')
+ // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
+ if abs != 0 {
+ if float32(abs) < 1e-6 || float32(abs) >= 1e21 {
+ fmt = 'e'
+ }
+ }
+ stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 32)
+}
+
+// WriteFloat32Lossy writes a float32 to the stream with only 6 digits of precision, which is much faster.
+func (stream *Stream) WriteFloat32Lossy(val float32) {
+ if val < 0 {
+ stream.writeByte('-')
+ val = -val
+ }
+ if val > 0x4ffffff {
+ stream.WriteFloat32(val)
+ return
+ }
+ precision := 6
+ exp := uint64(1000000) // 10^precision
+ lval := uint64(float64(val)*float64(exp) + 0.5)
+ stream.WriteUint64(lval / exp)
+ fval := lval % exp
+ if fval == 0 {
+ return
+ }
+ stream.writeByte('.')
+ for p := precision - 1; p > 0 && fval < pow10[p]; p-- {
+ stream.writeByte('0')
+ }
+ stream.WriteUint64(fval)
+ for stream.buf[len(stream.buf)-1] == '0' {
+ stream.buf = stream.buf[:len(stream.buf)-1]
+ }
+}
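+
+// A rounding sketch (illustrative only): at most six fractional digits
+// are kept and trailing zeros are trimmed.
+//
+//	s.WriteFloat32Lossy(1.2345678) // appends "1.234568"
+//	s.WriteFloat32Lossy(2.5)       // appends "2.5", not "2.500000"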
+
+// WriteFloat64 writes a float64 to the stream.
+func (stream *Stream) WriteFloat64(val float64) {
+ abs := math.Abs(val)
+ fmt := byte('f')
+ // Use 'e' notation for very small or very large magnitudes, matching encoding/json.
+ if abs != 0 {
+ if abs < 1e-6 || abs >= 1e21 {
+ fmt = 'e'
+ }
+ }
+ stream.buf = strconv.AppendFloat(stream.buf, val, fmt, -1, 64)
+}
+
+// WriteFloat64Lossy writes a float64 to the stream with only 6 digits of precision, which is much faster.
+func (stream *Stream) WriteFloat64Lossy(val float64) {
+ if val < 0 {
+ stream.writeByte('-')
+ val = -val
+ }
+ if val > 0x4ffffff {
+ stream.WriteFloat64(val)
+ return
+ }
+ precision := 6
+ exp := uint64(1000000) // 10^precision
+ lval := uint64(val*float64(exp) + 0.5)
+ stream.WriteUint64(lval / exp)
+ fval := lval % exp
+ if fval == 0 {
+ return
+ }
+ stream.writeByte('.')
+ for p := precision - 1; p > 0 && fval < pow10[p]; p-- {
+ stream.writeByte('0')
+ }
+ stream.WriteUint64(fval)
+ for stream.buf[len(stream.buf)-1] == '0' {
+ stream.buf = stream.buf[:len(stream.buf)-1]
+ }
+}
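+
+// A sketch of the cutoffs above (illustrative only): magnitudes outside
+// [1e-6, 1e21) switch to 'e' notation, matching encoding/json.
+//
+//	s.WriteFloat64(0.0000001) // appends "1e-07"
+//	s.WriteFloat64(100.5)     // appends "100.5"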
diff --git a/vendor/github.com/json-iterator/go/stream_int.go b/vendor/github.com/json-iterator/go/stream_int.go
new file mode 100644
index 000000000..d1059ee4c
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/stream_int.go
@@ -0,0 +1,190 @@
+package jsoniter
+
+var digits []uint32
+
+func init() {
+ digits = make([]uint32, 1000)
+ for i := uint32(0); i < 1000; i++ {
+ digits[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0'
+ if i < 10 {
+ digits[i] += 2 << 24
+ } else if i < 100 {
+ digits[i] += 1 << 24
+ }
+ }
+}
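+
+// A decoding sketch of the table above (illustrative only): each entry
+// packs three ASCII digit bytes, and the top byte tells writeFirstBuf
+// how many leading bytes to skip.
+//
+//	// digits[7]   == 0x02303037: skip two bytes, emit "7"
+//	// digits[42]  == 0x01303432: skip one byte, emit "42"
+//	// digits[123] == 0x00313233: emit "123"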
+
+func writeFirstBuf(space []byte, v uint32) []byte {
+ start := v >> 24
+ if start == 0 {
+ space = append(space, byte(v>>16), byte(v>>8))
+ } else if start == 1 {
+ space = append(space, byte(v>>8))
+ }
+ space = append(space, byte(v))
+ return space
+}
+
+func writeBuf(buf []byte, v uint32) []byte {
+ return append(buf, byte(v>>16), byte(v>>8), byte(v))
+}
+
+// WriteUint8 writes a uint8 to the stream.
+func (stream *Stream) WriteUint8(val uint8) {
+ stream.buf = writeFirstBuf(stream.buf, digits[val])
+}
+
+// WriteInt8 writes an int8 to the stream.
+func (stream *Stream) WriteInt8(nval int8) {
+ var val uint8
+ if nval < 0 {
+ val = uint8(-nval)
+ stream.buf = append(stream.buf, '-')
+ } else {
+ val = uint8(nval)
+ }
+ stream.buf = writeFirstBuf(stream.buf, digits[val])
+}
+
+// WriteUint16 writes a uint16 to the stream.
+func (stream *Stream) WriteUint16(val uint16) {
+ q1 := val / 1000
+ if q1 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[val])
+ return
+ }
+ r1 := val - q1*1000
+ stream.buf = writeFirstBuf(stream.buf, digits[q1])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+}
+
+// WriteInt16 writes an int16 to the stream.
+func (stream *Stream) WriteInt16(nval int16) {
+ var val uint16
+ if nval < 0 {
+ val = uint16(-nval)
+ stream.buf = append(stream.buf, '-')
+ } else {
+ val = uint16(nval)
+ }
+ stream.WriteUint16(val)
+}
+
+// WriteUint32 writes a uint32 to the stream.
+func (stream *Stream) WriteUint32(val uint32) {
+ q1 := val / 1000
+ if q1 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[val])
+ return
+ }
+ r1 := val - q1*1000
+ q2 := q1 / 1000
+ if q2 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q1])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+ return
+ }
+ r2 := q1 - q2*1000
+ q3 := q2 / 1000
+ if q3 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q2])
+ } else {
+ r3 := q2 - q3*1000
+ stream.buf = append(stream.buf, byte(q3+'0'))
+ stream.buf = writeBuf(stream.buf, digits[r3])
+ }
+ stream.buf = writeBuf(stream.buf, digits[r2])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+}
+
+// WriteInt32 writes an int32 to the stream.
+func (stream *Stream) WriteInt32(nval int32) {
+ var val uint32
+ if nval < 0 {
+ val = uint32(-nval)
+ stream.buf = append(stream.buf, '-')
+ } else {
+ val = uint32(nval)
+ }
+ stream.WriteUint32(val)
+}
+
+// WriteUint64 writes a uint64 to the stream.
+func (stream *Stream) WriteUint64(val uint64) {
+ q1 := val / 1000
+ if q1 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[val])
+ return
+ }
+ r1 := val - q1*1000
+ q2 := q1 / 1000
+ if q2 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q1])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+ return
+ }
+ r2 := q1 - q2*1000
+ q3 := q2 / 1000
+ if q3 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q2])
+ stream.buf = writeBuf(stream.buf, digits[r2])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+ return
+ }
+ r3 := q2 - q3*1000
+ q4 := q3 / 1000
+ if q4 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q3])
+ stream.buf = writeBuf(stream.buf, digits[r3])
+ stream.buf = writeBuf(stream.buf, digits[r2])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+ return
+ }
+ r4 := q3 - q4*1000
+ q5 := q4 / 1000
+ if q5 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q4])
+ stream.buf = writeBuf(stream.buf, digits[r4])
+ stream.buf = writeBuf(stream.buf, digits[r3])
+ stream.buf = writeBuf(stream.buf, digits[r2])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+ return
+ }
+ r5 := q4 - q5*1000
+ q6 := q5 / 1000
+ if q6 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q5])
+ } else {
+ stream.buf = writeFirstBuf(stream.buf, digits[q6])
+ r6 := q5 - q6*1000
+ stream.buf = writeBuf(stream.buf, digits[r6])
+ }
+ stream.buf = writeBuf(stream.buf, digits[r5])
+ stream.buf = writeBuf(stream.buf, digits[r4])
+ stream.buf = writeBuf(stream.buf, digits[r3])
+ stream.buf = writeBuf(stream.buf, digits[r2])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+}
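+
+// A worked trace (illustrative only): the value is peeled into
+// base-1000 chunks, each emitted via the digits table.
+//
+//	// WriteUint64(1234567): q1=1234, r1=567; q2=1, r2=234; q3=0
+//	// -> writeFirstBuf(digits[1]), writeBuf(digits[234]),
+//	//    writeBuf(digits[567]) -> "1234567"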
+
+// WriteInt64 writes an int64 to the stream.
+func (stream *Stream) WriteInt64(nval int64) {
+ var val uint64
+ if nval < 0 {
+ val = uint64(-nval)
+ stream.buf = append(stream.buf, '-')
+ } else {
+ val = uint64(nval)
+ }
+ stream.WriteUint64(val)
+}
+
+// WriteInt writes an int to the stream.
+func (stream *Stream) WriteInt(val int) {
+ stream.WriteInt64(int64(val))
+}
+
+// WriteUint writes a uint to the stream.
+func (stream *Stream) WriteUint(val uint) {
+ stream.WriteUint64(uint64(val))
+}
diff --git a/vendor/github.com/json-iterator/go/stream_str.go b/vendor/github.com/json-iterator/go/stream_str.go
new file mode 100644
index 000000000..54c2ba0b3
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/stream_str.go
@@ -0,0 +1,372 @@
+package jsoniter
+
+import (
+ "unicode/utf8"
+)
+
+// htmlSafeSet holds the value true if the ASCII character with the given
+// array position can be safely represented inside a JSON string, embedded
+// inside of HTML