Limit jstream parse depth (#20474)

Add https://github.com/bcicen/jstream/pull/15 by vendoring the package.

Sets the JSON parse depth limit to 100 nesting levels in S3 Select.
This commit is contained in:
Klaus Post
2024-09-23 12:35:41 -07:00
committed by GitHub
parent 03e996320e
commit 974cbb3bb7
21 changed files with 1484 additions and 19 deletions

View File

@@ -24,7 +24,7 @@ import (
"runtime"
"sync"
"github.com/bcicen/jstream"
"github.com/minio/minio/internal/s3select/jstream"
"github.com/minio/minio/internal/s3select/sql"
)
@@ -185,7 +185,7 @@ func (r *PReader) startReaders() {
dst = make([]jstream.KVS, 0, 1000)
}
d := jstream.NewDecoder(bytes.NewBuffer(in.input), 0).ObjectAsKVS()
d := jstream.NewDecoder(bytes.NewBuffer(in.input), 0).ObjectAsKVS().MaxDepth(100)
stream := d.Stream()
all := dst[:0]
for mv := range stream {

View File

@@ -21,9 +21,8 @@ import (
"io"
"sync"
"github.com/minio/minio/internal/s3select/jstream"
"github.com/minio/minio/internal/s3select/sql"
"github.com/bcicen/jstream"
)
// Limit single document size to 10MiB, 10x the AWS limit:
@@ -84,7 +83,7 @@ func (r *Reader) Close() error {
// NewReader - creates new JSON reader using readCloser.
func NewReader(readCloser io.ReadCloser, args *ReaderArgs) *Reader {
readCloser = &syncReadCloser{rc: readCloser}
d := jstream.NewDecoder(io.LimitReader(readCloser, maxDocumentSize), 0).ObjectAsKVS()
d := jstream.NewDecoder(io.LimitReader(readCloser, maxDocumentSize), 0).ObjectAsKVS().MaxDepth(100)
return &Reader{
args: args,
decoder: d,

View File

@@ -26,8 +26,8 @@ import (
"strconv"
"strings"
"github.com/bcicen/jstream"
csv "github.com/minio/csvparser"
"github.com/minio/minio/internal/s3select/jstream"
"github.com/minio/minio/internal/s3select/sql"
)