Mirror of https://github.com/minio/minio.git
move argon2,csvparser into their repos
This commit is contained in:
parent c8050bc079
commit 7cd6f89c4b

@@ -1,333 +0,0 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the https://golang.org/LICENSE file.

// Package argon2 implements the key derivation function Argon2.
// Argon2 was selected as the winner of the Password Hashing Competition and can
// be used to derive cryptographic keys from passwords.
//
// For a detailed specification of Argon2 see [1].
//
// If you aren't sure which function you need, use Argon2id (IDKey) and
// the parameter recommendations for your scenario.
//
//
// Argon2i
//
// Argon2i (implemented by Key) is the side-channel resistant version of Argon2.
// It uses data-independent memory access, which is preferred for password
// hashing and password-based key derivation. Argon2i requires more passes over
// memory than Argon2id to protect from trade-off attacks. The recommended
// parameters (taken from [2]) for non-interactive operations are time=3 and to
// use the maximum available memory.
//
//
// Argon2id
//
// Argon2id (implemented by IDKey) is a hybrid version of Argon2 combining
// Argon2i and Argon2d. It uses data-independent memory access for the first
// half of the first iteration over the memory and data-dependent memory access
// for the rest. Argon2id is side-channel resistant and provides better brute-
// force cost savings due to time-memory tradeoffs than Argon2i. The recommended
// parameters for non-interactive operations (taken from [2]) are time=1 and to
// use the maximum available memory.
//
// [1] https://github.com/P-H-C/phc-winner-argon2/blob/master/argon2-specs.pdf
// [2] https://tools.ietf.org/html/draft-irtf-cfrg-argon2-03#section-9.3
//
// Modified to be used with MinIO. Modification here specifically adds
// sync.Pool reusable buffers to avoid large memory build up with frequent
// allocations done by memory hard PBKDF.
package argon2

import (
    "encoding/binary"
    "sync"

    "golang.org/x/crypto/blake2b"
)

// The Argon2 version implemented by this package.
const Version = 0x13

const (
    argon2d = iota
    argon2i
    argon2id
)

// Key derives a key from the password, salt, and cost parameters using Argon2i
// returning a byte slice of length keyLen that can be used as cryptographic
// key. The CPU cost and parallelism degree must be greater than zero.
//
// For example, you can get a derived key for e.g. AES-256 (which needs a
// 32-byte key) by doing:
//
//	key := argon2.Key([]byte("some password"), salt, 3, 32*1024, 4, 32)
//
// The draft RFC recommends[2] time=3, and memory=32*1024 is a sensible number.
// If using that amount of memory (32 MB) is not possible in some contexts then
// the time parameter can be increased to compensate.
//
// The time parameter specifies the number of passes over the memory and the
// memory parameter specifies the size of the memory in KiB. For example
// memory=32*1024 sets the memory cost to ~32 MB. The number of threads can be
// adjusted to the number of available CPUs. The cost parameters should be
// increased as memory latency and CPU parallelism increases. Remember to get a
// good random salt.
func Key(password, salt []byte, time, memory uint32, threads uint8, keyLen uint32) []byte {
    return deriveKey(argon2i, password, salt, nil, nil, time, memory, threads, keyLen)
}

// IDKey derives a key from the password, salt, and cost parameters using
// Argon2id returning a byte slice of length keyLen that can be used as
// cryptographic key. The CPU cost and parallelism degree must be greater than
// zero.
//
// For example, you can get a derived key for e.g. AES-256 (which needs a
// 32-byte key) by doing:
//
//	key := argon2.IDKey([]byte("some password"), salt, 1, 64*1024, 4, 32)
//
// The draft RFC recommends[2] time=1, and memory=64*1024 is a sensible number.
// If using that amount of memory (64 MB) is not possible in some contexts then
// the time parameter can be increased to compensate.
//
// The time parameter specifies the number of passes over the memory and the
// memory parameter specifies the size of the memory in KiB. For example
// memory=64*1024 sets the memory cost to ~64 MB. The number of threads can be
// adjusted to the numbers of available CPUs. The cost parameters should be
// increased as memory latency and CPU parallelism increases. Remember to get a
// good random salt.
func IDKey(password, salt []byte, time, memory uint32, threads uint8, keyLen uint32) []byte {
    return deriveKey(argon2id, password, salt, nil, nil, time, memory, threads, keyLen)
}

func clearBlocks(B []block) {
    for i := range B {
        B[i] = block{}
    }
}

// NewIDKey returns an argon2 PBKDF backend by sync.Pool
func NewIDKey(time, memory uint32, threads uint8) func([]byte, []byte, []byte, []byte, uint32) []byte {
    if time < 1 {
        panic("argon2: number of rounds too small")
    }
    if threads < 1 {
        panic("argon2: parallelism degree too low")
    }

    hashMemory := memory

    memory = memory / (syncPoints * uint32(threads)) * (syncPoints * uint32(threads))
    if memory < 2*syncPoints*uint32(threads) {
        memory = 2 * syncPoints * uint32(threads)
    }

    pool := sync.Pool{
        New: func() interface{} {
            b := make([]block, memory)
            return &b
        },
    }

    return func(password, salt, secret, data []byte, keyLen uint32) []byte {
        B := pool.Get().(*[]block)
        defer func() {
            clearBlocks(*B)
            pool.Put(B)
        }()

        h0 := initHash(password, salt, secret, data, time, hashMemory, uint32(threads), keyLen, argon2id)
        B1 := initBlocks(&h0, *B, uint32(threads))
        processBlocks(B1, time, memory, uint32(threads), argon2id)
        return extractKey(B1, memory, uint32(threads), keyLen)
    }
}

func deriveKey(mode int, password, salt, secret, data []byte, time, memory uint32, threads uint8, keyLen uint32) []byte {
    if time < 1 {
        panic("argon2: number of rounds too small")
    }
    if threads < 1 {
        panic("argon2: parallelism degree too low")
    }
    h0 := initHash(password, salt, secret, data, time, memory, uint32(threads), keyLen, mode)

    memory = memory / (syncPoints * uint32(threads)) * (syncPoints * uint32(threads))
    if memory < 2*syncPoints*uint32(threads) {
        memory = 2 * syncPoints * uint32(threads)
    }
    B := make([]block, memory)
    B = initBlocks(&h0, B, uint32(threads))
    processBlocks(B, time, memory, uint32(threads), mode)
    return extractKey(B, memory, uint32(threads), keyLen)
}

const (
    blockLength = 128
    syncPoints  = 4
)

type block [blockLength]uint64

func initHash(password, salt, key, data []byte, time, memory, threads, keyLen uint32, mode int) [blake2b.Size + 8]byte {
    var (
        h0     [blake2b.Size + 8]byte
        params [24]byte
        tmp    [4]byte
    )

    b2, _ := blake2b.New512(nil)
    binary.LittleEndian.PutUint32(params[0:4], threads)
    binary.LittleEndian.PutUint32(params[4:8], keyLen)
    binary.LittleEndian.PutUint32(params[8:12], memory)
    binary.LittleEndian.PutUint32(params[12:16], time)
    binary.LittleEndian.PutUint32(params[16:20], uint32(Version))
    binary.LittleEndian.PutUint32(params[20:24], uint32(mode))
    b2.Write(params[:])
    binary.LittleEndian.PutUint32(tmp[:], uint32(len(password)))
    b2.Write(tmp[:])
    b2.Write(password)
    binary.LittleEndian.PutUint32(tmp[:], uint32(len(salt)))
    b2.Write(tmp[:])
    b2.Write(salt)
    binary.LittleEndian.PutUint32(tmp[:], uint32(len(key)))
    b2.Write(tmp[:])
    b2.Write(key)
    binary.LittleEndian.PutUint32(tmp[:], uint32(len(data)))
    b2.Write(tmp[:])
    b2.Write(data)
    b2.Sum(h0[:0])
    return h0
}

func initBlocks(h0 *[blake2b.Size + 8]byte, blocks []block, threads uint32) []block {
    var block0 [1024]byte
    B := blocks
    for lane := uint32(0); lane < threads; lane++ {
        j := lane * (uint32(len(B)) / threads)
        binary.LittleEndian.PutUint32(h0[blake2b.Size+4:], lane)

        binary.LittleEndian.PutUint32(h0[blake2b.Size:], 0)
        blake2bHash(block0[:], h0[:])
        for i := range B[j+0] {
            B[j+0][i] = binary.LittleEndian.Uint64(block0[i*8:])
        }

        binary.LittleEndian.PutUint32(h0[blake2b.Size:], 1)
        blake2bHash(block0[:], h0[:])
        for i := range B[j+1] {
            B[j+1][i] = binary.LittleEndian.Uint64(block0[i*8:])
        }
    }
    return B
}

func processBlocks(B []block, time, memory, threads uint32, mode int) {
    lanes := memory / threads
    segments := lanes / syncPoints

    processSegment := func(n, slice, lane uint32, wg *sync.WaitGroup) {
        var addresses, in, zero block
        if mode == argon2i || (mode == argon2id && n == 0 && slice < syncPoints/2) {
            in[0] = uint64(n)
            in[1] = uint64(lane)
            in[2] = uint64(slice)
            in[3] = uint64(memory)
            in[4] = uint64(time)
            in[5] = uint64(mode)
        }

        index := uint32(0)
        if n == 0 && slice == 0 {
            index = 2 // we have already generated the first two blocks
            if mode == argon2i || mode == argon2id {
                in[6]++
                processBlock(&addresses, &in, &zero)
                processBlock(&addresses, &addresses, &zero)
            }
        }

        offset := lane*lanes + slice*segments + index
        var random uint64
        for index < segments {
            prev := offset - 1
            if index == 0 && slice == 0 {
                prev += lanes // last block in lane
            }
            if mode == argon2i || (mode == argon2id && n == 0 && slice < syncPoints/2) {
                if index%blockLength == 0 {
                    in[6]++
                    processBlock(&addresses, &in, &zero)
                    processBlock(&addresses, &addresses, &zero)
                }
                random = addresses[index%blockLength]
            } else {
                random = B[prev][0]
            }
            newOffset := indexAlpha(random, lanes, segments, threads, n, slice, lane, index)
            processBlockXOR(&B[offset], &B[prev], &B[newOffset])
            index, offset = index+1, offset+1
        }
        wg.Done()
    }

    for n := uint32(0); n < time; n++ {
        for slice := uint32(0); slice < syncPoints; slice++ {
            var wg sync.WaitGroup
            for lane := uint32(0); lane < threads; lane++ {
                wg.Add(1)
                go processSegment(n, slice, lane, &wg)
            }
            wg.Wait()
        }
    }

}

func extractKey(B []block, memory, threads, keyLen uint32) []byte {
    lanes := memory / threads
    for lane := uint32(0); lane < threads-1; lane++ {
        for i, v := range B[(lane*lanes)+lanes-1] {
            B[memory-1][i] ^= v
        }
    }

    var block [1024]byte
    for i, v := range B[memory-1] {
        binary.LittleEndian.PutUint64(block[i*8:], v)
    }
    key := make([]byte, keyLen)
    blake2bHash(key, block[:])
    return key
}

func indexAlpha(rand uint64, lanes, segments, threads, n, slice, lane, index uint32) uint32 {
    refLane := uint32(rand>>32) % threads
    if n == 0 && slice == 0 {
        refLane = lane
    }
    m, s := 3*segments, ((slice+1)%syncPoints)*segments
    if lane == refLane {
        m += index
    }
    if n == 0 {
        m, s = slice*segments, 0
        if slice == 0 || lane == refLane {
            m += index
        }
    }
    if index == 0 || lane == refLane {
        m--
    }
    return phi(rand, uint64(m), uint64(s), refLane, lanes)
}

func phi(rand, m, s uint64, lane, lanes uint32) uint32 {
    p := rand & 0xFFFFFFFF
    p = (p * p) >> 32
    p = (p * m) >> 32
    return lane*lanes + uint32((s+m-(p+1))%uint64(lanes))
}
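
For readers unfamiliar with the pool-backed variant this file adds for MinIO, the sketch below shows how NewIDKey is intended to be called: build the derivation function once, then reuse it so the Argon2 block buffers come from the sync.Pool instead of being reallocated per call. The import path and the cost parameters here are illustrative assumptions, not taken from this commit.

package main

import (
    "crypto/rand"
    "fmt"

    // Import path is an assumption; point it at wherever this argon2 package lives in your tree.
    "github.com/minio/minio/pkg/argon2"
)

func main() {
    // Construct once: the returned closure owns a sync.Pool of reusable []block buffers.
    derive := argon2.NewIDKey(1, 64*1024, 4) // time=1, memory=64 MiB, threads=4 (illustrative)

    salt := make([]byte, 16)
    if _, err := rand.Read(salt); err != nil {
        panic(err)
    }

    // Call the closure many times; buffers are wiped (clearBlocks) and returned to the pool after each use.
    key := derive([]byte("some password"), salt, nil, nil, 32)
    fmt.Printf("derived key: %x\n", key)
}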

@@ -1,280 +0,0 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package argon2

import (
    "bytes"
    "encoding/hex"
    "testing"
)

var (
    genKatPassword = []byte{
        0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
        0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
        0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
        0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
    }
    genKatSalt   = []byte{0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02}
    genKatSecret = []byte{0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03}
    genKatAAD    = []byte{0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04}
)

func TestArgon2(t *testing.T) {
    defer func(sse4 bool) { useSSE4 = sse4 }(useSSE4)

    if useSSE4 {
        t.Log("SSE4.1 version")
        testArgon2i(t)
        testArgon2d(t)
        testArgon2id(t)
        useSSE4 = false
    }
    t.Log("generic version")
    testArgon2i(t)
    testArgon2d(t)
    testArgon2id(t)
}

func testArgon2d(t *testing.T) {
    want := []byte{
        0x51, 0x2b, 0x39, 0x1b, 0x6f, 0x11, 0x62, 0x97,
        0x53, 0x71, 0xd3, 0x09, 0x19, 0x73, 0x42, 0x94,
        0xf8, 0x68, 0xe3, 0xbe, 0x39, 0x84, 0xf3, 0xc1,
        0xa1, 0x3a, 0x4d, 0xb9, 0xfa, 0xbe, 0x4a, 0xcb,
    }
    hash := deriveKey(argon2d, genKatPassword, genKatSalt, genKatSecret, genKatAAD, 3, 32, 4, 32)
    if !bytes.Equal(hash, want) {
        t.Errorf("derived key does not match - got: %s , want: %s", hex.EncodeToString(hash), hex.EncodeToString(want))
    }
}

func testArgon2i(t *testing.T) {
    want := []byte{
        0xc8, 0x14, 0xd9, 0xd1, 0xdc, 0x7f, 0x37, 0xaa,
        0x13, 0xf0, 0xd7, 0x7f, 0x24, 0x94, 0xbd, 0xa1,
        0xc8, 0xde, 0x6b, 0x01, 0x6d, 0xd3, 0x88, 0xd2,
        0x99, 0x52, 0xa4, 0xc4, 0x67, 0x2b, 0x6c, 0xe8,
    }
    hash := deriveKey(argon2i, genKatPassword, genKatSalt, genKatSecret, genKatAAD, 3, 32, 4, 32)
    if !bytes.Equal(hash, want) {
        t.Errorf("derived key does not match - got: %s , want: %s", hex.EncodeToString(hash), hex.EncodeToString(want))
    }
}

func testArgon2id(t *testing.T) {
    want := []byte{
        0x0d, 0x64, 0x0d, 0xf5, 0x8d, 0x78, 0x76, 0x6c,
        0x08, 0xc0, 0x37, 0xa3, 0x4a, 0x8b, 0x53, 0xc9,
        0xd0, 0x1e, 0xf0, 0x45, 0x2d, 0x75, 0xb6, 0x5e,
        0xb5, 0x25, 0x20, 0xe9, 0x6b, 0x01, 0xe6, 0x59,
    }
    hash := deriveKey(argon2id, genKatPassword, genKatSalt, genKatSecret, genKatAAD, 3, 32, 4, 32)
    if !bytes.Equal(hash, want) {
        t.Errorf("derived key does not match - got: %s , want: %s", hex.EncodeToString(hash), hex.EncodeToString(want))
    }
}

func TestVectorsIDKey(t *testing.T) {
    password, salt := []byte("password"), []byte("somesalt")
    for _, v := range testVectors {
        v := v
        t.Run("", func(t *testing.T) {
            var fn func(password, salt, secret, data []byte, keyLen uint32) []byte
            switch v.mode {
            case argon2id:
                fn = NewIDKey(v.time, v.memory, v.threads)
            default:
                t.Skip()
            }
            want, err := hex.DecodeString(v.hash)
            if err != nil {
                t.Fatalf("failed to decode hash: %v", err)
            }
            hash := fn(password, salt, nil, nil, uint32(len(want)))
            if !bytes.Equal(hash, want) {
                t.Errorf("got: %s want: %s", hex.EncodeToString(hash), hex.EncodeToString(want))
            }
        })
    }

}

func TestVectors(t *testing.T) {
    password, salt := []byte("password"), []byte("somesalt")
    for _, v := range testVectors {
        v := v
        t.Run("", func(t *testing.T) {
            want, err := hex.DecodeString(v.hash)
            if err != nil {
                t.Fatalf("failed to decode hash: %v", err)
            }
            hash := deriveKey(v.mode, password, salt, nil, nil, v.time, v.memory, v.threads, uint32(len(want)))
            if !bytes.Equal(hash, want) {
                t.Errorf("got: %s want: %s", hex.EncodeToString(hash), hex.EncodeToString(want))
            }
        })
    }
}

func benchmarkArgon2(mode int, time, memory uint32, threads uint8, keyLen uint32, b *testing.B) {
    password := []byte("password")
    salt := []byte("choosing random salts is hard")
    b.ReportAllocs()
    for i := 0; i < b.N; i++ {
        deriveKey(mode, password, salt, nil, nil, time, memory, threads, keyLen)
    }
}

func BenchmarkArgon2i(b *testing.B) {
    b.Run(" Time: 3 Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2(argon2i, 3, 32*1024, 1, 32, b) })
    b.Run(" Time: 4 Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2(argon2i, 4, 32*1024, 1, 32, b) })
    b.Run(" Time: 5 Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2(argon2i, 5, 32*1024, 1, 32, b) })
    b.Run(" Time: 3 Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2i, 3, 64*1024, 4, 32, b) })
    b.Run(" Time: 4 Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2i, 4, 64*1024, 4, 32, b) })
    b.Run(" Time: 5 Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2i, 5, 64*1024, 4, 32, b) })
}

func BenchmarkArgon2d(b *testing.B) {
    b.Run(" Time: 3, Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2(argon2d, 3, 32*1024, 1, 32, b) })
    b.Run(" Time: 4, Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2(argon2d, 4, 32*1024, 1, 32, b) })
    b.Run(" Time: 5, Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2(argon2d, 5, 32*1024, 1, 32, b) })
    b.Run(" Time: 3, Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2d, 3, 64*1024, 4, 32, b) })
    b.Run(" Time: 4, Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2d, 4, 64*1024, 4, 32, b) })
    b.Run(" Time: 5, Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2d, 5, 64*1024, 4, 32, b) })
}

func BenchmarkArgon2id(b *testing.B) {
    b.Run(" Time: 3, Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2(argon2id, 3, 32*1024, 1, 32, b) })
    b.Run(" Time: 4, Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2(argon2id, 4, 32*1024, 1, 32, b) })
    b.Run(" Time: 5, Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2(argon2id, 5, 32*1024, 1, 32, b) })
    b.Run(" Time: 3, Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2id, 3, 64*1024, 4, 32, b) })
    b.Run(" Time: 4, Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2id, 4, 64*1024, 4, 32, b) })
    b.Run(" Time: 5, Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2(argon2id, 5, 64*1024, 4, 32, b) })
}

func benchmarkArgon2idNew(time, memory uint32, threads uint8, keyLen uint32, b *testing.B) {
    f := NewIDKey(time, memory, threads)
    password := []byte("password")
    salt := []byte("choosing random salts is hard")
    b.ReportAllocs()
    for i := 0; i < b.N; i++ {
        _ = f(password, salt, nil, nil, keyLen)
    }
}

func BenchmarkArgon2idNew(b *testing.B) {
    b.Run(" Time: 3, Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2idNew(3, 32*1024, 1, 32, b) })
    b.Run(" Time: 4, Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2idNew(4, 32*1024, 1, 32, b) })
    b.Run(" Time: 5, Memory: 32 MB, Threads: 1", func(b *testing.B) { benchmarkArgon2idNew(5, 32*1024, 1, 32, b) })
    b.Run(" Time: 3, Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2idNew(3, 64*1024, 4, 32, b) })
    b.Run(" Time: 4, Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2idNew(4, 64*1024, 4, 32, b) })
    b.Run(" Time: 5, Memory: 64 MB, Threads: 4", func(b *testing.B) { benchmarkArgon2idNew(5, 64*1024, 4, 32, b) })
}

// Generated with the CLI of https://github.com/P-H-C/phc-winner-argon2/blob/master/argon2-specs.pdf
var testVectors = []struct {
    mode         int
    time, memory uint32
    threads      uint8
    hash         string
}{
    {
        mode: argon2i, time: 1, memory: 64, threads: 1,
        hash: "b9c401d1844a67d50eae3967dc28870b22e508092e861a37",
    },
    {
        mode: argon2d, time: 1, memory: 64, threads: 1,
        hash: "8727405fd07c32c78d64f547f24150d3f2e703a89f981a19",
    },
    {
        mode: argon2id, time: 1, memory: 64, threads: 1,
        hash: "655ad15eac652dc59f7170a7332bf49b8469be1fdb9c28bb",
    },
    {
        mode: argon2i, time: 2, memory: 64, threads: 1,
        hash: "8cf3d8f76a6617afe35fac48eb0b7433a9a670ca4a07ed64",
    },
    {
        mode: argon2d, time: 2, memory: 64, threads: 1,
        hash: "3be9ec79a69b75d3752acb59a1fbb8b295a46529c48fbb75",
    },
    {
        mode: argon2id, time: 2, memory: 64, threads: 1,
        hash: "068d62b26455936aa6ebe60060b0a65870dbfa3ddf8d41f7",
    },
    {
        mode: argon2i, time: 2, memory: 64, threads: 2,
        hash: "2089f3e78a799720f80af806553128f29b132cafe40d059f",
    },
    {
        mode: argon2d, time: 2, memory: 64, threads: 2,
        hash: "68e2462c98b8bc6bb60ec68db418ae2c9ed24fc6748a40e9",
    },
    {
        mode: argon2id, time: 2, memory: 64, threads: 2,
        hash: "350ac37222f436ccb5c0972f1ebd3bf6b958bf2071841362",
    },
    {
        mode: argon2i, time: 3, memory: 256, threads: 2,
        hash: "f5bbf5d4c3836af13193053155b73ec7476a6a2eb93fd5e6",
    },
    {
        mode: argon2d, time: 3, memory: 256, threads: 2,
        hash: "f4f0669218eaf3641f39cc97efb915721102f4b128211ef2",
    },
    {
        mode: argon2id, time: 3, memory: 256, threads: 2,
        hash: "4668d30ac4187e6878eedeacf0fd83c5a0a30db2cc16ef0b",
    },
    {
        mode: argon2i, time: 4, memory: 4096, threads: 4,
        hash: "a11f7b7f3f93f02ad4bddb59ab62d121e278369288a0d0e7",
    },
    {
        mode: argon2d, time: 4, memory: 4096, threads: 4,
        hash: "935598181aa8dc2b720914aa6435ac8d3e3a4210c5b0fb2d",
    },
    {
        mode: argon2id, time: 4, memory: 4096, threads: 4,
        hash: "145db9733a9f4ee43edf33c509be96b934d505a4efb33c5a",
    },
    {
        mode: argon2i, time: 4, memory: 1024, threads: 8,
        hash: "0cdd3956aa35e6b475a7b0c63488822f774f15b43f6e6e17",
    },
    {
        mode: argon2d, time: 4, memory: 1024, threads: 8,
        hash: "83604fc2ad0589b9d055578f4d3cc55bc616df3578a896e9",
    },
    {
        mode: argon2id, time: 4, memory: 1024, threads: 8,
        hash: "8dafa8e004f8ea96bf7c0f93eecf67a6047476143d15577f",
    },
    {
        mode: argon2i, time: 2, memory: 64, threads: 3,
        hash: "5cab452fe6b8479c8661def8cd703b611a3905a6d5477fe6",
    },
    {
        mode: argon2d, time: 2, memory: 64, threads: 3,
        hash: "22474a423bda2ccd36ec9afd5119e5c8949798cadf659f51",
    },
    {
        mode: argon2id, time: 2, memory: 64, threads: 3,
        hash: "4a15b31aec7c2590b87d1f520be7d96f56658172deaa3079",
    },
    {
        mode: argon2i, time: 3, memory: 1024, threads: 6,
        hash: "d236b29c2b2a09babee842b0dec6aa1e83ccbdea8023dced",
    },
    {
        mode: argon2d, time: 3, memory: 1024, threads: 6,
        hash: "a3351b0319a53229152023d9206902f4ef59661cdca89481",
    },
    {
        mode: argon2id, time: 3, memory: 1024, threads: 6,
        hash: "1640b932f4b60e272f5d2207b9a9c626ffa1bd88d2349016",
    },
}

@@ -1,53 +0,0 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package argon2

import (
    "encoding/binary"
    "hash"

    "golang.org/x/crypto/blake2b"
)

// blake2bHash computes an arbitrary long hash value of in
// and writes the hash to out.
func blake2bHash(out []byte, in []byte) {
    var b2 hash.Hash
    if n := len(out); n < blake2b.Size {
        b2, _ = blake2b.New(n, nil)
    } else {
        b2, _ = blake2b.New512(nil)
    }

    var buffer [blake2b.Size]byte
    binary.LittleEndian.PutUint32(buffer[:4], uint32(len(out)))
    b2.Write(buffer[:4])
    b2.Write(in)

    if len(out) <= blake2b.Size {
        b2.Sum(out[:0])
        return
    }

    outLen := len(out)
    b2.Sum(buffer[:0])
    b2.Reset()
    copy(out, buffer[:32])
    out = out[32:]
    for len(out) > blake2b.Size {
        b2.Write(buffer[:])
        b2.Sum(buffer[:0])
        copy(out, buffer[:32])
        out = out[32:]
        b2.Reset()
    }

    if outLen%blake2b.Size > 0 { // outLen > 64
        r := ((outLen + 31) / 32) - 2 // ⌈τ /32⌉-2
        b2, _ = blake2b.New(outLen-32*r, nil)
    }
    b2.Write(buffer[:])
    b2.Sum(out[:0])
}

@@ -1,60 +0,0 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build amd64,!gccgo,!appengine

package argon2

import "golang.org/x/sys/cpu"

func init() {
    useSSE4 = cpu.X86.HasSSE41
}

//go:noescape
func mixBlocksSSE2(out, a, b, c *block)

//go:noescape
func xorBlocksSSE2(out, a, b, c *block)

//go:noescape
func blamkaSSE4(b *block)

func processBlockSSE(out, in1, in2 *block, xor bool) {
    var t block
    mixBlocksSSE2(&t, in1, in2, &t)
    if useSSE4 {
        blamkaSSE4(&t)
    } else {
        for i := 0; i < blockLength; i += 16 {
            blamkaGeneric(
                &t[i+0], &t[i+1], &t[i+2], &t[i+3],
                &t[i+4], &t[i+5], &t[i+6], &t[i+7],
                &t[i+8], &t[i+9], &t[i+10], &t[i+11],
                &t[i+12], &t[i+13], &t[i+14], &t[i+15],
            )
        }
        for i := 0; i < blockLength/8; i += 2 {
            blamkaGeneric(
                &t[i], &t[i+1], &t[16+i], &t[16+i+1],
                &t[32+i], &t[32+i+1], &t[48+i], &t[48+i+1],
                &t[64+i], &t[64+i+1], &t[80+i], &t[80+i+1],
                &t[96+i], &t[96+i+1], &t[112+i], &t[112+i+1],
            )
        }
    }
    if xor {
        xorBlocksSSE2(out, in1, in2, &t)
    } else {
        mixBlocksSSE2(out, in1, in2, &t)
    }
}

func processBlock(out, in1, in2 *block) {
    processBlockSSE(out, in1, in2, false)
}

func processBlockXOR(out, in1, in2 *block) {
    processBlockSSE(out, in1, in2, true)
}

@@ -1,243 +0,0 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build amd64,!gccgo,!appengine

#include "textflag.h"

DATA ·c40<>+0x00(SB)/8, $0x0201000706050403
DATA ·c40<>+0x08(SB)/8, $0x0a09080f0e0d0c0b
GLOBL ·c40<>(SB), (NOPTR+RODATA), $16

DATA ·c48<>+0x00(SB)/8, $0x0100070605040302
DATA ·c48<>+0x08(SB)/8, $0x09080f0e0d0c0b0a
GLOBL ·c48<>(SB), (NOPTR+RODATA), $16

#define SHUFFLE(v2, v3, v4, v5, v6, v7, t1, t2) \
    MOVO       v4, t1; \
    MOVO       v5, v4; \
    MOVO       t1, v5; \
    MOVO       v6, t1; \
    PUNPCKLQDQ v6, t2; \
    PUNPCKHQDQ v7, v6; \
    PUNPCKHQDQ t2, v6; \
    PUNPCKLQDQ v7, t2; \
    MOVO       t1, v7; \
    MOVO       v2, t1; \
    PUNPCKHQDQ t2, v7; \
    PUNPCKLQDQ v3, t2; \
    PUNPCKHQDQ t2, v2; \
    PUNPCKLQDQ t1, t2; \
    PUNPCKHQDQ t2, v3

#define SHUFFLE_INV(v2, v3, v4, v5, v6, v7, t1, t2) \
    MOVO       v4, t1; \
    MOVO       v5, v4; \
    MOVO       t1, v5; \
    MOVO       v2, t1; \
    PUNPCKLQDQ v2, t2; \
    PUNPCKHQDQ v3, v2; \
    PUNPCKHQDQ t2, v2; \
    PUNPCKLQDQ v3, t2; \
    MOVO       t1, v3; \
    MOVO       v6, t1; \
    PUNPCKHQDQ t2, v3; \
    PUNPCKLQDQ v7, t2; \
    PUNPCKHQDQ t2, v6; \
    PUNPCKLQDQ t1, t2; \
    PUNPCKHQDQ t2, v7

#define HALF_ROUND(v0, v1, v2, v3, v4, v5, v6, v7, t0, c40, c48) \
    MOVO    v0, t0;        \
    PMULULQ v2, t0;        \
    PADDQ   v2, v0;        \
    PADDQ   t0, v0;        \
    PADDQ   t0, v0;        \
    PXOR    v0, v6;        \
    PSHUFD  $0xB1, v6, v6; \
    MOVO    v4, t0;        \
    PMULULQ v6, t0;        \
    PADDQ   v6, v4;        \
    PADDQ   t0, v4;        \
    PADDQ   t0, v4;        \
    PXOR    v4, v2;        \
    PSHUFB  c40, v2;       \
    MOVO    v0, t0;        \
    PMULULQ v2, t0;        \
    PADDQ   v2, v0;        \
    PADDQ   t0, v0;        \
    PADDQ   t0, v0;        \
    PXOR    v0, v6;        \
    PSHUFB  c48, v6;       \
    MOVO    v4, t0;        \
    PMULULQ v6, t0;        \
    PADDQ   v6, v4;        \
    PADDQ   t0, v4;        \
    PADDQ   t0, v4;        \
    PXOR    v4, v2;        \
    MOVO    v2, t0;        \
    PADDQ   v2, t0;        \
    PSRLQ   $63, v2;       \
    PXOR    t0, v2;        \
    MOVO    v1, t0;        \
    PMULULQ v3, t0;        \
    PADDQ   v3, v1;        \
    PADDQ   t0, v1;        \
    PADDQ   t0, v1;        \
    PXOR    v1, v7;        \
    PSHUFD  $0xB1, v7, v7; \
    MOVO    v5, t0;        \
    PMULULQ v7, t0;        \
    PADDQ   v7, v5;        \
    PADDQ   t0, v5;        \
    PADDQ   t0, v5;        \
    PXOR    v5, v3;        \
    PSHUFB  c40, v3;       \
    MOVO    v1, t0;        \
    PMULULQ v3, t0;        \
    PADDQ   v3, v1;        \
    PADDQ   t0, v1;        \
    PADDQ   t0, v1;        \
    PXOR    v1, v7;        \
    PSHUFB  c48, v7;       \
    MOVO    v5, t0;        \
    PMULULQ v7, t0;        \
    PADDQ   v7, v5;        \
    PADDQ   t0, v5;        \
    PADDQ   t0, v5;        \
    PXOR    v5, v3;        \
    MOVO    v3, t0;        \
    PADDQ   v3, t0;        \
    PSRLQ   $63, v3;       \
    PXOR    t0, v3

#define LOAD_MSG_0(block, off) \
    MOVOU 8*(off+0)(block), X0;  \
    MOVOU 8*(off+2)(block), X1;  \
    MOVOU 8*(off+4)(block), X2;  \
    MOVOU 8*(off+6)(block), X3;  \
    MOVOU 8*(off+8)(block), X4;  \
    MOVOU 8*(off+10)(block), X5; \
    MOVOU 8*(off+12)(block), X6; \
    MOVOU 8*(off+14)(block), X7

#define STORE_MSG_0(block, off) \
    MOVOU X0, 8*(off+0)(block);  \
    MOVOU X1, 8*(off+2)(block);  \
    MOVOU X2, 8*(off+4)(block);  \
    MOVOU X3, 8*(off+6)(block);  \
    MOVOU X4, 8*(off+8)(block);  \
    MOVOU X5, 8*(off+10)(block); \
    MOVOU X6, 8*(off+12)(block); \
    MOVOU X7, 8*(off+14)(block)

#define LOAD_MSG_1(block, off) \
    MOVOU 8*off+0*8(block), X0;   \
    MOVOU 8*off+16*8(block), X1;  \
    MOVOU 8*off+32*8(block), X2;  \
    MOVOU 8*off+48*8(block), X3;  \
    MOVOU 8*off+64*8(block), X4;  \
    MOVOU 8*off+80*8(block), X5;  \
    MOVOU 8*off+96*8(block), X6;  \
    MOVOU 8*off+112*8(block), X7

#define STORE_MSG_1(block, off) \
    MOVOU X0, 8*off+0*8(block);   \
    MOVOU X1, 8*off+16*8(block);  \
    MOVOU X2, 8*off+32*8(block);  \
    MOVOU X3, 8*off+48*8(block);  \
    MOVOU X4, 8*off+64*8(block);  \
    MOVOU X5, 8*off+80*8(block);  \
    MOVOU X6, 8*off+96*8(block);  \
    MOVOU X7, 8*off+112*8(block)

#define BLAMKA_ROUND_0(block, off, t0, t1, c40, c48) \
    LOAD_MSG_0(block, off);                                   \
    HALF_ROUND(X0, X1, X2, X3, X4, X5, X6, X7, t0, c40, c48); \
    SHUFFLE(X2, X3, X4, X5, X6, X7, t0, t1);                  \
    HALF_ROUND(X0, X1, X2, X3, X4, X5, X6, X7, t0, c40, c48); \
    SHUFFLE_INV(X2, X3, X4, X5, X6, X7, t0, t1);              \
    STORE_MSG_0(block, off)

#define BLAMKA_ROUND_1(block, off, t0, t1, c40, c48) \
    LOAD_MSG_1(block, off);                                   \
    HALF_ROUND(X0, X1, X2, X3, X4, X5, X6, X7, t0, c40, c48); \
    SHUFFLE(X2, X3, X4, X5, X6, X7, t0, t1);                  \
    HALF_ROUND(X0, X1, X2, X3, X4, X5, X6, X7, t0, c40, c48); \
    SHUFFLE_INV(X2, X3, X4, X5, X6, X7, t0, t1);              \
    STORE_MSG_1(block, off)

// func blamkaSSE4(b *block)
TEXT ·blamkaSSE4(SB), 4, $0-8
    MOVQ b+0(FP), AX

    MOVOU ·c40<>(SB), X10
    MOVOU ·c48<>(SB), X11

    BLAMKA_ROUND_0(AX, 0, X8, X9, X10, X11)
    BLAMKA_ROUND_0(AX, 16, X8, X9, X10, X11)
    BLAMKA_ROUND_0(AX, 32, X8, X9, X10, X11)
    BLAMKA_ROUND_0(AX, 48, X8, X9, X10, X11)
    BLAMKA_ROUND_0(AX, 64, X8, X9, X10, X11)
    BLAMKA_ROUND_0(AX, 80, X8, X9, X10, X11)
    BLAMKA_ROUND_0(AX, 96, X8, X9, X10, X11)
    BLAMKA_ROUND_0(AX, 112, X8, X9, X10, X11)

    BLAMKA_ROUND_1(AX, 0, X8, X9, X10, X11)
    BLAMKA_ROUND_1(AX, 2, X8, X9, X10, X11)
    BLAMKA_ROUND_1(AX, 4, X8, X9, X10, X11)
    BLAMKA_ROUND_1(AX, 6, X8, X9, X10, X11)
    BLAMKA_ROUND_1(AX, 8, X8, X9, X10, X11)
    BLAMKA_ROUND_1(AX, 10, X8, X9, X10, X11)
    BLAMKA_ROUND_1(AX, 12, X8, X9, X10, X11)
    BLAMKA_ROUND_1(AX, 14, X8, X9, X10, X11)
    RET

// func mixBlocksSSE2(out, a, b, c *block)
TEXT ·mixBlocksSSE2(SB), 4, $0-32
    MOVQ out+0(FP), DX
    MOVQ a+8(FP), AX
    MOVQ b+16(FP), BX
    MOVQ a+24(FP), CX
    MOVQ $128, BP

loop:
    MOVOU 0(AX), X0
    MOVOU 0(BX), X1
    MOVOU 0(CX), X2
    PXOR  X1, X0
    PXOR  X2, X0
    MOVOU X0, 0(DX)
    ADDQ  $16, AX
    ADDQ  $16, BX
    ADDQ  $16, CX
    ADDQ  $16, DX
    SUBQ  $2, BP
    JA    loop
    RET

// func xorBlocksSSE2(out, a, b, c *block)
TEXT ·xorBlocksSSE2(SB), 4, $0-32
    MOVQ out+0(FP), DX
    MOVQ a+8(FP), AX
    MOVQ b+16(FP), BX
    MOVQ a+24(FP), CX
    MOVQ $128, BP

loop:
    MOVOU 0(AX), X0
    MOVOU 0(BX), X1
    MOVOU 0(CX), X2
    MOVOU 0(DX), X3
    PXOR  X1, X0
    PXOR  X2, X0
    PXOR  X3, X0
    MOVOU X0, 0(DX)
    ADDQ  $16, AX
    ADDQ  $16, BX
    ADDQ  $16, CX
    ADDQ  $16, DX
    SUBQ  $2, BP
    JA    loop
    RET

@@ -1,163 +0,0 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package argon2

var useSSE4 bool

func processBlockGeneric(out, in1, in2 *block, xor bool) {
    var t block
    for i := range t {
        t[i] = in1[i] ^ in2[i]
    }
    for i := 0; i < blockLength; i += 16 {
        blamkaGeneric(
            &t[i+0], &t[i+1], &t[i+2], &t[i+3],
            &t[i+4], &t[i+5], &t[i+6], &t[i+7],
            &t[i+8], &t[i+9], &t[i+10], &t[i+11],
            &t[i+12], &t[i+13], &t[i+14], &t[i+15],
        )
    }
    for i := 0; i < blockLength/8; i += 2 {
        blamkaGeneric(
            &t[i], &t[i+1], &t[16+i], &t[16+i+1],
            &t[32+i], &t[32+i+1], &t[48+i], &t[48+i+1],
            &t[64+i], &t[64+i+1], &t[80+i], &t[80+i+1],
            &t[96+i], &t[96+i+1], &t[112+i], &t[112+i+1],
        )
    }
    if xor {
        for i := range t {
            out[i] ^= in1[i] ^ in2[i] ^ t[i]
        }
    } else {
        for i := range t {
            out[i] = in1[i] ^ in2[i] ^ t[i]
        }
    }
}

func blamkaGeneric(t00, t01, t02, t03, t04, t05, t06, t07, t08, t09, t10, t11, t12, t13, t14, t15 *uint64) {
    v00, v01, v02, v03 := *t00, *t01, *t02, *t03
    v04, v05, v06, v07 := *t04, *t05, *t06, *t07
    v08, v09, v10, v11 := *t08, *t09, *t10, *t11
    v12, v13, v14, v15 := *t12, *t13, *t14, *t15

    v00 += v04 + 2*uint64(uint32(v00))*uint64(uint32(v04))
    v12 ^= v00
    v12 = v12>>32 | v12<<32
    v08 += v12 + 2*uint64(uint32(v08))*uint64(uint32(v12))
    v04 ^= v08
    v04 = v04>>24 | v04<<40

    v00 += v04 + 2*uint64(uint32(v00))*uint64(uint32(v04))
    v12 ^= v00
    v12 = v12>>16 | v12<<48
    v08 += v12 + 2*uint64(uint32(v08))*uint64(uint32(v12))
    v04 ^= v08
    v04 = v04>>63 | v04<<1

    v01 += v05 + 2*uint64(uint32(v01))*uint64(uint32(v05))
    v13 ^= v01
    v13 = v13>>32 | v13<<32
    v09 += v13 + 2*uint64(uint32(v09))*uint64(uint32(v13))
    v05 ^= v09
    v05 = v05>>24 | v05<<40

    v01 += v05 + 2*uint64(uint32(v01))*uint64(uint32(v05))
    v13 ^= v01
    v13 = v13>>16 | v13<<48
    v09 += v13 + 2*uint64(uint32(v09))*uint64(uint32(v13))
    v05 ^= v09
    v05 = v05>>63 | v05<<1

    v02 += v06 + 2*uint64(uint32(v02))*uint64(uint32(v06))
    v14 ^= v02
    v14 = v14>>32 | v14<<32
    v10 += v14 + 2*uint64(uint32(v10))*uint64(uint32(v14))
    v06 ^= v10
    v06 = v06>>24 | v06<<40

    v02 += v06 + 2*uint64(uint32(v02))*uint64(uint32(v06))
    v14 ^= v02
    v14 = v14>>16 | v14<<48
    v10 += v14 + 2*uint64(uint32(v10))*uint64(uint32(v14))
    v06 ^= v10
    v06 = v06>>63 | v06<<1

    v03 += v07 + 2*uint64(uint32(v03))*uint64(uint32(v07))
    v15 ^= v03
    v15 = v15>>32 | v15<<32
    v11 += v15 + 2*uint64(uint32(v11))*uint64(uint32(v15))
    v07 ^= v11
    v07 = v07>>24 | v07<<40

    v03 += v07 + 2*uint64(uint32(v03))*uint64(uint32(v07))
    v15 ^= v03
    v15 = v15>>16 | v15<<48
    v11 += v15 + 2*uint64(uint32(v11))*uint64(uint32(v15))
    v07 ^= v11
    v07 = v07>>63 | v07<<1

    v00 += v05 + 2*uint64(uint32(v00))*uint64(uint32(v05))
    v15 ^= v00
    v15 = v15>>32 | v15<<32
    v10 += v15 + 2*uint64(uint32(v10))*uint64(uint32(v15))
    v05 ^= v10
    v05 = v05>>24 | v05<<40

    v00 += v05 + 2*uint64(uint32(v00))*uint64(uint32(v05))
    v15 ^= v00
    v15 = v15>>16 | v15<<48
    v10 += v15 + 2*uint64(uint32(v10))*uint64(uint32(v15))
    v05 ^= v10
    v05 = v05>>63 | v05<<1

    v01 += v06 + 2*uint64(uint32(v01))*uint64(uint32(v06))
    v12 ^= v01
    v12 = v12>>32 | v12<<32
    v11 += v12 + 2*uint64(uint32(v11))*uint64(uint32(v12))
    v06 ^= v11
    v06 = v06>>24 | v06<<40

    v01 += v06 + 2*uint64(uint32(v01))*uint64(uint32(v06))
    v12 ^= v01
    v12 = v12>>16 | v12<<48
    v11 += v12 + 2*uint64(uint32(v11))*uint64(uint32(v12))
    v06 ^= v11
    v06 = v06>>63 | v06<<1

    v02 += v07 + 2*uint64(uint32(v02))*uint64(uint32(v07))
    v13 ^= v02
    v13 = v13>>32 | v13<<32
    v08 += v13 + 2*uint64(uint32(v08))*uint64(uint32(v13))
    v07 ^= v08
    v07 = v07>>24 | v07<<40

    v02 += v07 + 2*uint64(uint32(v02))*uint64(uint32(v07))
    v13 ^= v02
    v13 = v13>>16 | v13<<48
    v08 += v13 + 2*uint64(uint32(v08))*uint64(uint32(v13))
    v07 ^= v08
    v07 = v07>>63 | v07<<1

    v03 += v04 + 2*uint64(uint32(v03))*uint64(uint32(v04))
    v14 ^= v03
    v14 = v14>>32 | v14<<32
    v09 += v14 + 2*uint64(uint32(v09))*uint64(uint32(v14))
    v04 ^= v09
    v04 = v04>>24 | v04<<40

    v03 += v04 + 2*uint64(uint32(v03))*uint64(uint32(v04))
    v14 ^= v03
    v14 = v14>>16 | v14<<48
    v09 += v14 + 2*uint64(uint32(v09))*uint64(uint32(v14))
    v04 ^= v09
    v04 = v04>>63 | v04<<1

    *t00, *t01, *t02, *t03 = v00, v01, v02, v03
    *t04, *t05, *t06, *t07 = v04, v05, v06, v07
    *t08, *t09, *t10, *t11 = v08, v09, v10, v11
    *t12, *t13, *t14, *t15 = v12, v13, v14, v15
}
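
The arithmetic in blamkaGeneric is the BlaMka variant of BLAKE2b's G function: plain 64-bit addition is replaced by the multiplication-hardened form a + b + 2*lo32(a)*lo32(b), followed by the usual xor-and-rotate steps. The sketch below is a standalone illustration of one such G application (not part of the package); it uses math/bits for the rotations instead of the explicit shift/or expressions above.

package main

import (
    "fmt"
    "math/bits"
)

// blamkaAdd is the multiplication-hardened addition BlaMka substitutes for
// BLAKE2b's plain 64-bit addition: a + b + 2*lo32(a)*lo32(b).
func blamkaAdd(a, b uint64) uint64 {
    return a + b + 2*uint64(uint32(a))*uint64(uint32(b))
}

// blamkaG applies one G step to four state words, equivalent to the first
// column group of blamkaGeneric above (rotations are right by 32, 24, 16, 63).
func blamkaG(a, b, c, d uint64) (uint64, uint64, uint64, uint64) {
    a = blamkaAdd(a, b)
    d = bits.RotateLeft64(d^a, -32)
    c = blamkaAdd(c, d)
    b = bits.RotateLeft64(b^c, -24)
    a = blamkaAdd(a, b)
    d = bits.RotateLeft64(d^a, -16)
    c = blamkaAdd(c, d)
    b = bits.RotateLeft64(b^c, -63)
    return a, b, c, d
}

func main() {
    a, b, c, d := blamkaG(1, 2, 3, 4)
    fmt.Println(a, b, c, d)
}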

@@ -1,15 +0,0 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build !amd64 appengine gccgo

package argon2

func processBlock(out, in1, in2 *block) {
    processBlockGeneric(out, in1, in2, false)
}

func processBlockXOR(out, in1, in2 *block) {
    processBlockGeneric(out, in1, in2, true)
}

@@ -1,131 +0,0 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in https://golang.org/LICENSE

package csv_test

import (
    "encoding/csv"
    "fmt"
    "io"
    "log"
    "os"
    "strings"
)

func ExampleReader() {
    in := `first_name,last_name,username
"Rob","Pike",rob
Ken,Thompson,ken
"Robert","Griesemer","gri"
`
    r := csv.NewReader(strings.NewReader(in))

    for {
        record, err := r.Read()
        if err == io.EOF {
            break
        }
        if err != nil {
            log.Fatal(err)
        }

        fmt.Println(record)
    }
    // Output:
    // [first_name last_name username]
    // [Rob Pike rob]
    // [Ken Thompson ken]
    // [Robert Griesemer gri]
}

// This example shows how csv.Reader can be configured to handle other
// types of CSV files.
func ExampleReader_options() {
    in := `first_name;last_name;username
"Rob";"Pike";rob
# lines beginning with a # character are ignored
Ken;Thompson;ken
"Robert";"Griesemer";"gri"
`
    r := csv.NewReader(strings.NewReader(in))
    r.Comma = ';'
    r.Comment = '#'

    records, err := r.ReadAll()
    if err != nil {
        log.Fatal(err)
    }

    fmt.Print(records)
    // Output:
    // [[first_name last_name username] [Rob Pike rob] [Ken Thompson ken] [Robert Griesemer gri]]
}

func ExampleReader_ReadAll() {
    in := `first_name,last_name,username
"Rob","Pike",rob
Ken,Thompson,ken
"Robert","Griesemer","gri"
`
    r := csv.NewReader(strings.NewReader(in))

    records, err := r.ReadAll()
    if err != nil {
        log.Fatal(err)
    }

    fmt.Print(records)
    // Output:
    // [[first_name last_name username] [Rob Pike rob] [Ken Thompson ken] [Robert Griesemer gri]]
}

func ExampleWriter() {
    records := [][]string{
        {"first_name", "last_name", "username"},
        {"Rob", "Pike", "rob"},
        {"Ken", "Thompson", "ken"},
        {"Robert", "Griesemer", "gri"},
    }

    w := csv.NewWriter(os.Stdout)

    for _, record := range records {
        if err := w.Write(record); err != nil {
            log.Fatalln("error writing record to csv:", err)
        }
    }

    // Write any buffered data to the underlying writer (standard output).
    w.Flush()

    if err := w.Error(); err != nil {
        log.Fatal(err)
    }
    // Output:
    // first_name,last_name,username
    // Rob,Pike,rob
    // Ken,Thompson,ken
    // Robert,Griesemer,gri
}

func ExampleWriter_WriteAll() {
    records := [][]string{
        {"first_name", "last_name", "username"},
        {"Rob", "Pike", "rob"},
        {"Ken", "Thompson", "ken"},
        {"Robert", "Griesemer", "gri"},
    }

    w := csv.NewWriter(os.Stdout)
    w.WriteAll(records) // calls Flush internally

    if err := w.Error(); err != nil {
        log.Fatalln("error writing csv:", err)
    }
    // Output:
    // first_name,last_name,username
    // Rob,Pike,rob
    // Ken,Thompson,ken
    // Robert,Griesemer,gri
}

@@ -1,70 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in https://golang.org/LICENSE

// +build gofuzz

package csv

import (
    "bytes"
    "fmt"
    "reflect"
)

func Fuzz(data []byte) int {
    score := 0
    buf := new(bytes.Buffer)

    for _, tt := range []Reader{
        {},
        {Comma: ';'},
        {Comma: '\t'},
        {LazyQuotes: true},
        {TrimLeadingSpace: true},
        {Comment: '#'},
        {Comment: ';'},
    } {
        r := NewReader(bytes.NewReader(data))
        r.Comma = tt.Comma
        r.Comment = tt.Comment
        r.LazyQuotes = tt.LazyQuotes
        r.TrimLeadingSpace = tt.TrimLeadingSpace

        records, err := r.ReadAll()
        if err != nil {
            continue
        }
        score = 1

        buf.Reset()
        w := NewWriter(buf)
        w.Comma = tt.Comma
        err = w.WriteAll(records)
        if err != nil {
            fmt.Printf("writer = %#v\n", w)
            fmt.Printf("records = %v\n", records)
            panic(err)
        }

        r = NewReader(buf)
        r.Comma = tt.Comma
        r.Comment = tt.Comment
        r.LazyQuotes = tt.LazyQuotes
        r.TrimLeadingSpace = tt.TrimLeadingSpace
        result, err := r.ReadAll()
        if err != nil {
            fmt.Printf("reader = %#v\n", r)
            fmt.Printf("records = %v\n", records)
            panic(err)
        }

        if !reflect.DeepEqual(records, result) {
            fmt.Println("records = \n", records)
            fmt.Println("result = \n", records)
            panic("not equal")
        }
    }

    return score
}
@ -1,458 +0,0 @@
|
|||||||
// Copyright 2011 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in https://golang.org/LICENSE
|
|
||||||
|
|
||||||
// Package csv reads and writes comma-separated values (CSV) files.
|
|
||||||
// There are many kinds of CSV files; this package supports the format
|
|
||||||
// described in RFC 4180.
|
|
||||||
//
|
|
||||||
// A csv file contains zero or more records of one or more fields per record.
|
|
||||||
// Each record is separated by the newline character. The final record may
|
|
||||||
// optionally be followed by a newline character.
|
|
||||||
//
|
|
||||||
// field1,field2,field3
|
|
||||||
//
|
|
||||||
// White space is considered part of a field.
|
|
||||||
//
|
|
||||||
// Carriage returns before newline characters are silently removed.
|
|
||||||
//
|
|
||||||
// Blank lines are ignored. A line with only whitespace characters (excluding
|
|
||||||
// the ending newline character) is not considered a blank line.
|
|
||||||
//
|
|
||||||
// Fields which start and stop with the quote character " are called
|
|
||||||
// quoted-fields. The beginning and ending quote are not part of the
|
|
||||||
// field.
|
|
||||||
//
|
|
||||||
// The source:
|
|
||||||
//
|
|
||||||
// normal string,"quoted-field"
|
|
||||||
//
|
|
||||||
// results in the fields
|
|
||||||
//
|
|
||||||
// {`normal string`, `quoted-field`}
|
|
||||||
//
|
|
||||||
// Within a quoted-field a quote character followed by a second quote
|
|
||||||
// character is considered a single quote.
|
|
||||||
//
|
|
||||||
// "the ""word"" is true","a ""quoted-field"""
|
|
||||||
//
|
|
||||||
// results in
|
|
||||||
//
|
|
||||||
// {`the "word" is true`, `a "quoted-field"`}
|
|
||||||
//
|
|
||||||
// Newlines and commas may be included in a quoted-field
|
|
||||||
//
|
|
||||||
// "Multi-line
|
|
||||||
// field","comma is ,"
|
|
||||||
//
|
|
||||||
// results in
|
|
||||||
//
|
|
||||||
// {`Multi-line
|
|
||||||
// field`, `comma is ,`}
|
|
||||||
//
|
|
||||||
// Modified to be used with MinIO. Main modifications include
|
|
||||||
// - Configurable 'quote' parameter
|
|
||||||
// - Performance improvements
|
|
||||||
// benchmark old ns/op new ns/op delta
|
|
||||||
// BenchmarkRead-8 2807 2189 -22.02%
|
|
||||||
// BenchmarkReadWithFieldsPerRecord-8 2802 2179 -22.23%
|
|
||||||
// BenchmarkReadWithoutFieldsPerRecord-8 2824 2181 -22.77%
|
|
||||||
// BenchmarkReadLargeFields-8 3584 3371 -5.94%
|
|
||||||
// BenchmarkReadReuseRecord-8 2044 1480 -27.59%
|
|
||||||
// BenchmarkReadReuseRecordWithFieldsPerRecord-8 2056 1483 -27.87%
|
|
||||||
// BenchmarkReadReuseRecordWithoutFieldsPerRecord-8 2047 1482 -27.60%
|
|
||||||
// BenchmarkReadReuseRecordLargeFields-8 2777 2594 -6.59%
|
|
||||||
package csv
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bufio"
|
|
||||||
"bytes"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"unicode"
|
|
||||||
"unicode/utf8"
|
|
||||||
)
|
|
||||||
|
|
||||||
// A ParseError is returned for parsing errors.
|
|
||||||
// Line numbers are 1-indexed and columns are 0-indexed.
|
|
||||||
type ParseError struct {
|
|
||||||
StartLine int // Line where the record starts
|
|
||||||
Line int // Line where the error occurred
|
|
||||||
Column int // Column (rune index) where the error occurred
|
|
||||||
Err error // The actual error
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *ParseError) Error() string {
|
|
||||||
if e.Err == ErrFieldCount {
|
|
||||||
return fmt.Sprintf("record on line %d: %v", e.Line, e.Err)
|
|
||||||
}
|
|
||||||
if e.StartLine != e.Line {
|
|
||||||
return fmt.Sprintf("record on line %d; parse error on line %d, column %d: %v", e.StartLine, e.Line, e.Column, e.Err)
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("parse error on line %d, column %d: %v", e.Line, e.Column, e.Err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Unwrap returns the underlying error
|
|
||||||
func (e *ParseError) Unwrap() error { return e.Err }
|
|
||||||
|
|
||||||
// These are the errors that can be returned in ParseError.Err.
|
|
||||||
var (
|
|
||||||
ErrTrailingComma = errors.New("extra delimiter at end of line") // Deprecated: No longer used.
|
|
||||||
ErrBareQuote = errors.New("bare \" in non-quoted-field")
|
|
||||||
ErrQuote = errors.New("extraneous or missing \" in quoted-field")
|
|
||||||
ErrFieldCount = errors.New("wrong number of fields")
|
|
||||||
)
|
|
||||||
|
|
||||||
var errInvalidDelim = errors.New("csv: invalid field or comment delimiter")
|
|
||||||
|
|
||||||
func validDelim(r rune) bool {
|
|
||||||
return r != 0 && r != '"' && r != '\r' && r != '\n' && utf8.ValidRune(r) && r != utf8.RuneError
|
|
||||||
}

// A Reader reads records from a CSV-encoded file.
//
// As returned by NewReader, a Reader expects input conforming to RFC 4180.
// The exported fields can be changed to customize the details before the
// first call to Read or ReadAll.
//
// The Reader converts all \r\n sequences in its input to plain \n,
// including in multiline field values, so that the returned data does
// not depend on which line-ending convention an input file uses.
type Reader struct {
	// Comma is the field delimiter.
	// It is set to comma (',') by NewReader.
	// Comma must be a valid rune and must not be \r, \n,
	// or the Unicode replacement character (0xFFFD).
	Comma rune

	// Quote is the rune used to mark field boundaries; only the first
	// rune of the slice is used.
	Quote []rune

	// QuoteEscape is the rune used to escape the quote character.
	QuoteEscape rune

	// Comment, if not 0, is the comment character. Lines beginning with the
	// Comment character without preceding whitespace are ignored.
	// With leading whitespace the Comment character becomes part of the
	// field, even if TrimLeadingSpace is true.
	// Comment must be a valid rune and must not be \r, \n,
	// or the Unicode replacement character (0xFFFD).
	// It must also not be equal to Comma.
	Comment rune

	// FieldsPerRecord is the number of expected fields per record.
	// If FieldsPerRecord is positive, Read requires each record to
	// have the given number of fields. If FieldsPerRecord is 0, Read sets it to
	// the number of fields in the first record, so that future records must
	// have the same field count. If FieldsPerRecord is negative, no check is
	// made and records may have a variable number of fields.
	FieldsPerRecord int

	// If LazyQuotes is true, a quote may appear in an unquoted field and a
	// non-doubled quote may appear in a quoted field.
	LazyQuotes bool

	// If TrimLeadingSpace is true, leading white space in a field is ignored.
	// This is done even if the field delimiter, Comma, is white space.
	TrimLeadingSpace bool

	// ReuseRecord controls whether calls to Read may return a slice sharing
	// the backing array of the previous call's returned slice for performance.
	// By default, each call to Read returns newly allocated memory owned by the caller.
	ReuseRecord bool

	TrailingComma bool // Deprecated: No longer used.

	r *bufio.Reader

	// numLine is the current line being read in the CSV file.
	numLine int

	// rawBuffer is a line buffer only used by the readLine method.
	rawBuffer []byte

	// recordBuffer holds the unescaped fields, one after another.
	// The fields can be accessed by using the indexes in fieldIndexes.
	// E.g., For the row `a,"b","c""d",e`, recordBuffer will contain `abc"de`
	// and fieldIndexes will contain the indexes [1, 2, 5, 6].
	recordBuffer []byte

	// fieldIndexes is an index of fields inside recordBuffer.
	// The i'th field ends at offset fieldIndexes[i] in recordBuffer.
	fieldIndexes []int

	// lastRecord is a record cache and only used when ReuseRecord == true.
	lastRecord []string

	// Cached values reused between Read() calls for performance.
	cached               bool
	cachedQuoteEscapeLen int
	cachedQuoteLen       int
	cachedEncodedQuote   []byte
	cachedCommaLen       int
	cachedQuotes         string
}

// NewReader returns a new Reader that reads from r.
func NewReader(r io.Reader) *Reader {
	return &Reader{
		Comma:       ',',
		Quote:       []rune(`"`),
		QuoteEscape: '"',
		r:           bufio.NewReader(r),
	}
}
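
// Illustrative sketch (not part of the original file): using the configurable
// quote mentioned in the package comment. The pipe delimiter and single-quote
// characters below are hypothetical choices.
//
//	r := NewReader(strings.NewReader("'a'|'b,c'\n"))
//	r.Comma = '|'
//	r.Quote = []rune("'")
//	r.QuoteEscape = '\''
//	record, _ := r.Read() // record == []string{"a", "b,c"}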

// Read reads one record (a slice of fields) from r.
// If the record has an unexpected number of fields,
// Read returns the record along with the error ErrFieldCount.
// Except for that case, Read always returns either a non-nil
// record or a non-nil error, but not both.
// If there is no data left to be read, Read returns nil, io.EOF.
// If ReuseRecord is true, the returned slice may be shared
// between multiple calls to Read.
func (r *Reader) Read() (record []string, err error) {
	if r.ReuseRecord {
		record, err = r.readRecord(r.lastRecord)
		r.lastRecord = record
	} else {
		record, err = r.readRecord(nil)
	}
	return record, err
}

// ReadAll reads all the remaining records from r.
// Each record is a slice of fields.
// A successful call returns err == nil, not err == io.EOF. Because ReadAll is
// defined to read until EOF, it does not treat end of file as an error to be
// reported.
func (r *Reader) ReadAll() (records [][]string, err error) {
	for {
		record, err := r.readRecord(nil)
		if err == io.EOF {
			return records, nil
		}
		if err != nil {
			return nil, err
		}
		records = append(records, record)
	}
}
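
// Illustrative sketch (not part of the original file): streaming records with
// Read and ReuseRecord instead of buffering everything via ReadAll. Here
// `input` (an io.Reader) and `process` are hypothetical; the copy is needed
// because the returned slice may be overwritten by the next call when
// ReuseRecord is true.
//
//	r := NewReader(input)
//	r.ReuseRecord = true
//	for {
//		record, err := r.Read()
//		if err == io.EOF {
//			break
//		}
//		if err != nil {
//			return err
//		}
//		process(append([]string(nil), record...))
//	}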
|
|
||||||
|
|
||||||
// readLine reads the next line (with the trailing endline).
|
|
||||||
// If EOF is hit without a trailing endline, it will be omitted.
|
|
||||||
// If some bytes were read, then the error is never io.EOF.
|
|
||||||
// The result is only valid until the next call to readLine.
|
|
||||||
func (r *Reader) readLine() ([]byte, error) {
|
|
||||||
line, err := r.r.ReadSlice('\n')
|
|
||||||
if err == bufio.ErrBufferFull {
|
|
||||||
r.rawBuffer = append(r.rawBuffer[:0], line...)
|
|
||||||
for err == bufio.ErrBufferFull {
|
|
||||||
line, err = r.r.ReadSlice('\n')
|
|
||||||
r.rawBuffer = append(r.rawBuffer, line...)
|
|
||||||
}
|
|
||||||
line = r.rawBuffer
|
|
||||||
}
|
|
||||||
if len(line) > 0 && err == io.EOF {
|
|
||||||
err = nil
|
|
||||||
// For backwards compatibility, drop trailing \r before EOF.
|
|
||||||
if line[len(line)-1] == '\r' {
|
|
||||||
line = line[:len(line)-1]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
r.numLine++
|
|
||||||
// Normalize \r\n to \n on all input lines.
|
|
||||||
if n := len(line); n >= 2 && line[n-2] == '\r' && line[n-1] == '\n' {
|
|
||||||
line[n-2] = '\n'
|
|
||||||
line = line[:n-1]
|
|
||||||
}
|
|
||||||
return line, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// lengthNL reports the number of bytes for the trailing \n.
|
|
||||||
func lengthNL(b []byte) int {
|
|
||||||
if len(b) > 0 && b[len(b)-1] == '\n' {
|
|
||||||
return 1
|
|
||||||
}
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
// nextRune returns the next rune in b or utf8.RuneError.
|
|
||||||
func nextRune(b []byte) rune {
|
|
||||||
r, _ := utf8.DecodeRune(b)
|
|
||||||
return r
|
|
||||||
}
|
|
||||||
|
|
||||||
func encodeRune(r rune) []byte {
|
|
||||||
rlen := utf8.RuneLen(r)
|
|
||||||
p := make([]byte, rlen)
|
|
||||||
_ = utf8.EncodeRune(p, r)
|
|
||||||
return p
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Reader) readRecord(dst []string) ([]string, error) {
|
|
||||||
if r.Comma == r.Comment || !validDelim(r.Comma) || (r.Comment != 0 && !validDelim(r.Comment)) {
|
|
||||||
return nil, errInvalidDelim
|
|
||||||
}
|
|
||||||
|
|
||||||
// Read line (automatically skipping past empty lines and any comments).
|
|
||||||
var line, fullLine []byte
|
|
||||||
var errRead error
|
|
||||||
for errRead == nil {
|
|
||||||
line, errRead = r.readLine()
|
|
||||||
if r.Comment != 0 && nextRune(line) == r.Comment {
|
|
||||||
line = nil
|
|
||||||
continue // Skip comment lines
|
|
||||||
}
|
|
||||||
if errRead == nil && len(line) == lengthNL(line) {
|
|
||||||
line = nil
|
|
||||||
continue // Skip empty lines
|
|
||||||
}
|
|
||||||
fullLine = line
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if errRead == io.EOF {
|
|
||||||
return nil, errRead
|
|
||||||
}
|
|
||||||
|
|
||||||
if !r.cached {
|
|
||||||
r.cachedQuoteEscapeLen = utf8.RuneLen(r.QuoteEscape)
|
|
||||||
if len(r.Quote) > 0 {
|
|
||||||
r.cachedQuoteLen = utf8.RuneLen(r.Quote[0])
|
|
||||||
r.cachedEncodedQuote = encodeRune(r.Quote[0])
|
|
||||||
r.cachedQuotes += string(r.Quote[0])
|
|
||||||
}
|
|
||||||
r.cachedCommaLen = utf8.RuneLen(r.Comma)
|
|
||||||
r.cachedQuotes += string(r.QuoteEscape)
|
|
||||||
r.cached = true
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse each field in the record.
|
|
||||||
var err error
|
|
||||||
recLine := r.numLine // Starting line for record
|
|
||||||
r.recordBuffer = r.recordBuffer[:0]
|
|
||||||
r.fieldIndexes = r.fieldIndexes[:0]
|
|
||||||
parseField:
|
|
||||||
for {
|
|
||||||
if r.TrimLeadingSpace {
|
|
||||||
line = bytes.TrimLeftFunc(line, unicode.IsSpace)
|
|
||||||
}
|
|
||||||
if len(line) == 0 || r.cachedQuoteLen == 0 || nextRune(line) != r.Quote[0] {
|
|
||||||
// Non-quoted string field
|
|
||||||
i := bytes.IndexRune(line, r.Comma)
|
|
||||||
field := line
|
|
||||||
if i >= 0 {
|
|
||||||
field = field[:i]
|
|
||||||
} else {
|
|
||||||
field = field[:len(field)-lengthNL(field)]
|
|
||||||
}
|
|
||||||
// Check to make sure a quote does not appear in field.
|
|
||||||
if !r.LazyQuotes {
|
|
||||||
if j := bytes.IndexRune(field, r.Quote[0]); j >= 0 {
|
|
||||||
col := utf8.RuneCount(fullLine[:len(fullLine)-len(line[j:])])
|
|
||||||
err = &ParseError{StartLine: recLine, Line: r.numLine, Column: col, Err: ErrBareQuote}
|
|
||||||
break parseField
|
|
||||||
}
|
|
||||||
}
|
|
||||||
r.recordBuffer = append(r.recordBuffer, field...)
|
|
||||||
r.fieldIndexes = append(r.fieldIndexes, len(r.recordBuffer))
|
|
||||||
if i >= 0 {
|
|
||||||
line = line[i+r.cachedCommaLen:]
|
|
||||||
continue parseField
|
|
||||||
}
|
|
||||||
break parseField
|
|
||||||
} else {
|
|
||||||
// Quoted string field
|
|
||||||
line = line[r.cachedQuoteLen:]
|
|
||||||
for {
|
|
||||||
i := bytes.IndexAny(line, r.cachedQuotes)
|
|
||||||
if i >= 0 {
|
|
||||||
// Hit next quote or escape quote
|
|
||||||
r.recordBuffer = append(r.recordBuffer, line[:i]...)
|
|
||||||
|
|
||||||
escape := nextRune(line[i:]) == r.QuoteEscape
|
|
||||||
if escape {
|
|
||||||
line = line[i+r.cachedQuoteEscapeLen:]
|
|
||||||
} else {
|
|
||||||
line = line[i+r.cachedQuoteLen:]
|
|
||||||
}
|
|
||||||
|
|
||||||
switch rn := nextRune(line); {
|
|
||||||
case escape && r.QuoteEscape != r.Quote[0]:
|
|
||||||
r.recordBuffer = append(r.recordBuffer, encodeRune(rn)...)
|
|
||||||
line = line[utf8.RuneLen(rn):]
|
|
||||||
case rn == r.Quote[0]:
|
|
||||||
// `""` sequence (append quote).
|
|
||||||
r.recordBuffer = append(r.recordBuffer, r.cachedEncodedQuote...)
|
|
||||||
line = line[r.cachedQuoteLen:]
|
|
||||||
case rn == r.Comma:
|
|
||||||
// `",` sequence (end of field).
|
|
||||||
line = line[r.cachedCommaLen:]
|
|
||||||
r.fieldIndexes = append(r.fieldIndexes, len(r.recordBuffer))
|
|
||||||
continue parseField
|
|
||||||
case lengthNL(line) == len(line):
|
|
||||||
// `"\n` sequence (end of line).
|
|
||||||
r.fieldIndexes = append(r.fieldIndexes, len(r.recordBuffer))
|
|
||||||
break parseField
|
|
||||||
case r.LazyQuotes:
|
|
||||||
// `"` sequence (bare quote).
|
|
||||||
r.recordBuffer = append(r.recordBuffer, r.cachedEncodedQuote...)
|
|
||||||
default:
|
|
||||||
// `"*` sequence (invalid non-escaped quote).
|
|
||||||
col := utf8.RuneCount(fullLine[:len(fullLine)-len(line)-r.cachedQuoteLen])
|
|
||||||
err = &ParseError{StartLine: recLine, Line: r.numLine, Column: col, Err: ErrQuote}
|
|
||||||
break parseField
|
|
||||||
}
|
|
||||||
} else if len(line) > 0 {
|
|
||||||
// Hit end of line (copy all data so far).
|
|
||||||
r.recordBuffer = append(r.recordBuffer, line...)
|
|
||||||
if errRead != nil {
|
|
||||||
break parseField
|
|
||||||
}
|
|
||||||
line, errRead = r.readLine()
|
|
||||||
if errRead == io.EOF {
|
|
||||||
errRead = nil
|
|
||||||
}
|
|
||||||
fullLine = line
|
|
||||||
} else {
|
|
||||||
// Abrupt end of file (EOF or error).
|
|
||||||
if !r.LazyQuotes && errRead == nil {
|
|
||||||
col := utf8.RuneCount(fullLine)
|
|
||||||
err = &ParseError{StartLine: recLine, Line: r.numLine, Column: col, Err: ErrQuote}
|
|
||||||
break parseField
|
|
||||||
}
|
|
||||||
r.fieldIndexes = append(r.fieldIndexes, len(r.recordBuffer))
|
|
||||||
break parseField
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if err == nil {
|
|
||||||
err = errRead
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create a single string and create slices out of it.
|
|
||||||
// This pins the memory of the fields together, but allocates once.
|
|
||||||
str := string(r.recordBuffer) // Convert to string once to batch allocations
|
|
||||||
dst = dst[:0]
|
|
||||||
if cap(dst) < len(r.fieldIndexes) {
|
|
||||||
dst = make([]string, len(r.fieldIndexes))
|
|
||||||
}
|
|
||||||
dst = dst[:len(r.fieldIndexes)]
|
|
||||||
var preIdx int
|
|
||||||
for i, idx := range r.fieldIndexes {
|
|
||||||
dst[i] = str[preIdx:idx]
|
|
||||||
preIdx = idx
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check or update the expected fields per record.
|
|
||||||
if r.FieldsPerRecord > 0 {
|
|
||||||
if len(dst) != r.FieldsPerRecord && err == nil {
|
|
||||||
err = &ParseError{StartLine: recLine, Line: recLine, Err: ErrFieldCount}
|
|
||||||
}
|
|
||||||
} else if r.FieldsPerRecord == 0 {
|
|
||||||
r.FieldsPerRecord = len(dst)
|
|
||||||
}
|
|
||||||
return dst, err
|
|
||||||
}
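
// Illustrative sketch (not part of the original file) of the allocation
// strategy used by readRecord above: all unescaped field bytes live in one
// string, and each field is a cheap sub-slice of it.
//
//	str := string(recordBuffer) // one allocation per record
//	fields := make([]string, len(fieldIndexes))
//	prev := 0
//	for i, end := range fieldIndexes {
//		fields[i] = str[prev:end]
//		prev = end
//	}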
|
|
@ -1,509 +0,0 @@
|
|||||||
// Copyright 2011 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in https://golang.org/LICENSE
|
|
||||||
|
|
||||||
package csv
|
|
||||||
|
|
||||||
import (
|
|
||||||
"io"
|
|
||||||
"reflect"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
"unicode/utf8"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestRead(t *testing.T) {
|
|
||||||
tests := []struct {
|
|
||||||
Name string
|
|
||||||
Input string
|
|
||||||
Output [][]string
|
|
||||||
Error error
|
|
||||||
|
|
||||||
// These fields are copied into the Reader
|
|
||||||
Comma rune
|
|
||||||
Comment rune
|
|
||||||
UseFieldsPerRecord bool // false (default) means FieldsPerRecord is -1
|
|
||||||
FieldsPerRecord int
|
|
||||||
LazyQuotes bool
|
|
||||||
TrimLeadingSpace bool
|
|
||||||
ReuseRecord bool
|
|
||||||
}{{
|
|
||||||
Name: "Simple",
|
|
||||||
Input: "a,b,c\n",
|
|
||||||
Output: [][]string{{"a", "b", "c"}},
|
|
||||||
}, {
|
|
||||||
Name: "CRLF",
|
|
||||||
Input: "a,b\r\nc,d\r\n",
|
|
||||||
Output: [][]string{{"a", "b"}, {"c", "d"}},
|
|
||||||
}, {
|
|
||||||
Name: "BareCR",
|
|
||||||
Input: "a,b\rc,d\r\n",
|
|
||||||
Output: [][]string{{"a", "b\rc", "d"}},
|
|
||||||
}, {
|
|
||||||
Name: "RFC4180test",
|
|
||||||
Input: `#field1,field2,field3
|
|
||||||
"aaa","bb
|
|
||||||
b","ccc"
|
|
||||||
"a,a","b""bb","ccc"
|
|
||||||
zzz,yyy,xxx
|
|
||||||
`,
|
|
||||||
Output: [][]string{
|
|
||||||
{"#field1", "field2", "field3"},
|
|
||||||
{"aaa", "bb\nb", "ccc"},
|
|
||||||
{"a,a", `b"bb`, "ccc"},
|
|
||||||
{"zzz", "yyy", "xxx"},
|
|
||||||
},
|
|
||||||
UseFieldsPerRecord: true,
|
|
||||||
FieldsPerRecord: 0,
|
|
||||||
}, {
|
|
||||||
Name: "NoEOLTest",
|
|
||||||
Input: "a,b,c",
|
|
||||||
Output: [][]string{{"a", "b", "c"}},
|
|
||||||
}, {
|
|
||||||
Name: "Semicolon",
|
|
||||||
Input: "a;b;c\n",
|
|
||||||
Output: [][]string{{"a", "b", "c"}},
|
|
||||||
Comma: ';',
|
|
||||||
}, {
|
|
||||||
Name: "MultiLine",
|
|
||||||
Input: `"two
|
|
||||||
line","one line","three
|
|
||||||
line
|
|
||||||
field"`,
|
|
||||||
Output: [][]string{{"two\nline", "one line", "three\nline\nfield"}},
|
|
||||||
}, {
|
|
||||||
Name: "BlankLine",
|
|
||||||
Input: "a,b,c\n\nd,e,f\n\n",
|
|
||||||
Output: [][]string{
|
|
||||||
{"a", "b", "c"},
|
|
||||||
{"d", "e", "f"},
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
Name: "BlankLineFieldCount",
|
|
||||||
Input: "a,b,c\n\nd,e,f\n\n",
|
|
||||||
Output: [][]string{
|
|
||||||
{"a", "b", "c"},
|
|
||||||
{"d", "e", "f"},
|
|
||||||
},
|
|
||||||
UseFieldsPerRecord: true,
|
|
||||||
FieldsPerRecord: 0,
|
|
||||||
}, {
|
|
||||||
Name: "TrimSpace",
|
|
||||||
Input: " a, b, c\n",
|
|
||||||
Output: [][]string{{"a", "b", "c"}},
|
|
||||||
TrimLeadingSpace: true,
|
|
||||||
}, {
|
|
||||||
Name: "LeadingSpace",
|
|
||||||
Input: " a, b, c\n",
|
|
||||||
Output: [][]string{{" a", " b", " c"}},
|
|
||||||
}, {
|
|
||||||
Name: "Comment",
|
|
||||||
Input: "#1,2,3\na,b,c\n#comment",
|
|
||||||
Output: [][]string{{"a", "b", "c"}},
|
|
||||||
Comment: '#',
|
|
||||||
}, {
|
|
||||||
Name: "NoComment",
|
|
||||||
Input: "#1,2,3\na,b,c",
|
|
||||||
Output: [][]string{{"#1", "2", "3"}, {"a", "b", "c"}},
|
|
||||||
}, {
|
|
||||||
Name: "LazyQuotes",
|
|
||||||
Input: `a "word","1"2",a","b`,
|
|
||||||
Output: [][]string{{`a "word"`, `1"2`, `a"`, `b`}},
|
|
||||||
LazyQuotes: true,
|
|
||||||
}, {
|
|
||||||
Name: "BareQuotes",
|
|
||||||
Input: `a "word","1"2",a"`,
|
|
||||||
Output: [][]string{{`a "word"`, `1"2`, `a"`}},
|
|
||||||
LazyQuotes: true,
|
|
||||||
}, {
|
|
||||||
Name: "BareDoubleQuotes",
|
|
||||||
Input: `a""b,c`,
|
|
||||||
Output: [][]string{{`a""b`, `c`}},
|
|
||||||
LazyQuotes: true,
|
|
||||||
}, {
|
|
||||||
Name: "BadDoubleQuotes",
|
|
||||||
Input: `a""b,c`,
|
|
||||||
Error: &ParseError{StartLine: 1, Line: 1, Column: 1, Err: ErrBareQuote},
|
|
||||||
}, {
|
|
||||||
Name: "TrimQuote",
|
|
||||||
Input: ` "a"," b",c`,
|
|
||||||
Output: [][]string{{"a", " b", "c"}},
|
|
||||||
TrimLeadingSpace: true,
|
|
||||||
}, {
|
|
||||||
Name: "BadBareQuote",
|
|
||||||
Input: `a "word","b"`,
|
|
||||||
Error: &ParseError{StartLine: 1, Line: 1, Column: 2, Err: ErrBareQuote},
|
|
||||||
}, {
|
|
||||||
Name: "BadTrailingQuote",
|
|
||||||
Input: `"a word",b"`,
|
|
||||||
Error: &ParseError{StartLine: 1, Line: 1, Column: 10, Err: ErrBareQuote},
|
|
||||||
}, {
|
|
||||||
Name: "ExtraneousQuote",
|
|
||||||
Input: `"a "word","b"`,
|
|
||||||
Error: &ParseError{StartLine: 1, Line: 1, Column: 3, Err: ErrQuote},
|
|
||||||
}, {
|
|
||||||
Name: "BadFieldCount",
|
|
||||||
Input: "a,b,c\nd,e",
|
|
||||||
Error: &ParseError{StartLine: 2, Line: 2, Err: ErrFieldCount},
|
|
||||||
UseFieldsPerRecord: true,
|
|
||||||
FieldsPerRecord: 0,
|
|
||||||
}, {
|
|
||||||
Name: "BadFieldCount1",
|
|
||||||
Input: `a,b,c`,
|
|
||||||
Error: &ParseError{StartLine: 1, Line: 1, Err: ErrFieldCount},
|
|
||||||
UseFieldsPerRecord: true,
|
|
||||||
FieldsPerRecord: 2,
|
|
||||||
}, {
|
|
||||||
Name: "FieldCount",
|
|
||||||
Input: "a,b,c\nd,e",
|
|
||||||
Output: [][]string{{"a", "b", "c"}, {"d", "e"}},
|
|
||||||
}, {
|
|
||||||
Name: "TrailingCommaEOF",
|
|
||||||
Input: "a,b,c,",
|
|
||||||
Output: [][]string{{"a", "b", "c", ""}},
|
|
||||||
}, {
|
|
||||||
Name: "TrailingCommaEOL",
|
|
||||||
Input: "a,b,c,\n",
|
|
||||||
Output: [][]string{{"a", "b", "c", ""}},
|
|
||||||
}, {
|
|
||||||
Name: "TrailingCommaSpaceEOF",
|
|
||||||
Input: "a,b,c, ",
|
|
||||||
Output: [][]string{{"a", "b", "c", ""}},
|
|
||||||
TrimLeadingSpace: true,
|
|
||||||
}, {
|
|
||||||
Name: "TrailingCommaSpaceEOL",
|
|
||||||
Input: "a,b,c, \n",
|
|
||||||
Output: [][]string{{"a", "b", "c", ""}},
|
|
||||||
TrimLeadingSpace: true,
|
|
||||||
}, {
|
|
||||||
Name: "TrailingCommaLine3",
|
|
||||||
Input: "a,b,c\nd,e,f\ng,hi,",
|
|
||||||
Output: [][]string{{"a", "b", "c"}, {"d", "e", "f"}, {"g", "hi", ""}},
|
|
||||||
TrimLeadingSpace: true,
|
|
||||||
}, {
|
|
||||||
Name: "NotTrailingComma3",
|
|
||||||
Input: "a,b,c, \n",
|
|
||||||
Output: [][]string{{"a", "b", "c", " "}},
|
|
||||||
}, {
|
|
||||||
Name: "CommaFieldTest",
|
|
||||||
Input: `x,y,z,w
|
|
||||||
x,y,z,
|
|
||||||
x,y,,
|
|
||||||
x,,,
|
|
||||||
,,,
|
|
||||||
"x","y","z","w"
|
|
||||||
"x","y","z",""
|
|
||||||
"x","y","",""
|
|
||||||
"x","","",""
|
|
||||||
"","","",""
|
|
||||||
`,
|
|
||||||
Output: [][]string{
|
|
||||||
{"x", "y", "z", "w"},
|
|
||||||
{"x", "y", "z", ""},
|
|
||||||
{"x", "y", "", ""},
|
|
||||||
{"x", "", "", ""},
|
|
||||||
{"", "", "", ""},
|
|
||||||
{"x", "y", "z", "w"},
|
|
||||||
{"x", "y", "z", ""},
|
|
||||||
{"x", "y", "", ""},
|
|
||||||
{"x", "", "", ""},
|
|
||||||
{"", "", "", ""},
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
Name: "TrailingCommaIneffective1",
|
|
||||||
Input: "a,b,\nc,d,e",
|
|
||||||
Output: [][]string{
|
|
||||||
{"a", "b", ""},
|
|
||||||
{"c", "d", "e"},
|
|
||||||
},
|
|
||||||
TrimLeadingSpace: true,
|
|
||||||
}, {
|
|
||||||
Name: "ReadAllReuseRecord",
|
|
||||||
Input: "a,b\nc,d",
|
|
||||||
Output: [][]string{
|
|
||||||
{"a", "b"},
|
|
||||||
{"c", "d"},
|
|
||||||
},
|
|
||||||
ReuseRecord: true,
|
|
||||||
}, {
|
|
||||||
Name: "StartLine1", // Issue 19019
|
|
||||||
Input: "a,\"b\nc\"d,e",
|
|
||||||
Error: &ParseError{StartLine: 1, Line: 2, Column: 1, Err: ErrQuote},
|
|
||||||
}, {
|
|
||||||
Name: "StartLine2",
|
|
||||||
Input: "a,b\n\"d\n\n,e",
|
|
||||||
Error: &ParseError{StartLine: 2, Line: 5, Column: 0, Err: ErrQuote},
|
|
||||||
}, {
|
|
||||||
Name: "CRLFInQuotedField", // Issue 21201
|
|
||||||
Input: "A,\"Hello\r\nHi\",B\r\n",
|
|
||||||
Output: [][]string{
|
|
||||||
{"A", "Hello\nHi", "B"},
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
Name: "BinaryBlobField", // Issue 19410
|
|
||||||
Input: "x09\x41\xb4\x1c,aktau",
|
|
||||||
Output: [][]string{{"x09A\xb4\x1c", "aktau"}},
|
|
||||||
}, {
|
|
||||||
Name: "TrailingCR",
|
|
||||||
Input: "field1,field2\r",
|
|
||||||
Output: [][]string{{"field1", "field2"}},
|
|
||||||
}, {
|
|
||||||
Name: "QuotedTrailingCR",
|
|
||||||
Input: "\"field\"\r",
|
|
||||||
Output: [][]string{{"field"}},
|
|
||||||
}, {
|
|
||||||
Name: "QuotedTrailingCRCR",
|
|
||||||
Input: "\"field\"\r\r",
|
|
||||||
Error: &ParseError{StartLine: 1, Line: 1, Column: 6, Err: ErrQuote},
|
|
||||||
}, {
|
|
||||||
Name: "FieldCR",
|
|
||||||
Input: "field\rfield\r",
|
|
||||||
Output: [][]string{{"field\rfield"}},
|
|
||||||
}, {
|
|
||||||
Name: "FieldCRCR",
|
|
||||||
Input: "field\r\rfield\r\r",
|
|
||||||
Output: [][]string{{"field\r\rfield\r"}},
|
|
||||||
}, {
|
|
||||||
Name: "FieldCRCRLF",
|
|
||||||
Input: "field\r\r\nfield\r\r\n",
|
|
||||||
Output: [][]string{{"field\r"}, {"field\r"}},
|
|
||||||
}, {
|
|
||||||
Name: "FieldCRCRLFCR",
|
|
||||||
Input: "field\r\r\n\rfield\r\r\n\r",
|
|
||||||
Output: [][]string{{"field\r"}, {"\rfield\r"}},
|
|
||||||
}, {
|
|
||||||
Name: "FieldCRCRLFCRCR",
|
|
||||||
Input: "field\r\r\n\r\rfield\r\r\n\r\r",
|
|
||||||
Output: [][]string{{"field\r"}, {"\r\rfield\r"}, {"\r"}},
|
|
||||||
}, {
|
|
||||||
Name: "MultiFieldCRCRLFCRCR",
|
|
||||||
Input: "field1,field2\r\r\n\r\rfield1,field2\r\r\n\r\r,",
|
|
||||||
Output: [][]string{
|
|
||||||
{"field1", "field2\r"},
|
|
||||||
{"\r\rfield1", "field2\r"},
|
|
||||||
{"\r\r", ""},
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
Name: "NonASCIICommaAndComment",
|
|
||||||
Input: "a£b,c£ \td,e\n€ comment\n",
|
|
||||||
Output: [][]string{{"a", "b,c", "d,e"}},
|
|
||||||
TrimLeadingSpace: true,
|
|
||||||
Comma: '£',
|
|
||||||
Comment: '€',
|
|
||||||
}, {
|
|
||||||
Name: "NonASCIICommaAndCommentWithQuotes",
|
|
||||||
Input: "a€\" b,\"€ c\nλ comment\n",
|
|
||||||
Output: [][]string{{"a", " b,", " c"}},
|
|
||||||
Comma: '€',
|
|
||||||
Comment: 'λ',
|
|
||||||
}, {
|
|
||||||
// λ and θ start with the same byte.
|
|
||||||
// This tests that the parser doesn't confuse such characters.
|
|
||||||
Name: "NonASCIICommaConfusion",
|
|
||||||
Input: "\"abθcd\"λefθgh",
|
|
||||||
Output: [][]string{{"abθcd", "efθgh"}},
|
|
||||||
Comma: 'λ',
|
|
||||||
Comment: '€',
|
|
||||||
}, {
|
|
||||||
Name: "NonASCIICommentConfusion",
|
|
||||||
Input: "λ\nλ\nθ\nλ\n",
|
|
||||||
Output: [][]string{{"λ"}, {"λ"}, {"λ"}},
|
|
||||||
Comment: 'θ',
|
|
||||||
}, {
|
|
||||||
Name: "QuotedFieldMultipleLF",
|
|
||||||
Input: "\"\n\n\n\n\"",
|
|
||||||
Output: [][]string{{"\n\n\n\n"}},
|
|
||||||
}, {
|
|
||||||
Name: "MultipleCRLF",
|
|
||||||
Input: "\r\n\r\n\r\n\r\n",
|
|
||||||
}, {
|
|
||||||
// The implementation may read each line in several chunks if it doesn't fit entirely
|
|
||||||
// in the read buffer, so we should test the code to handle that condition.
|
|
||||||
Name: "HugeLines",
|
|
||||||
Input: strings.Repeat("#ignore\n", 10000) + strings.Repeat("@", 5000) + "," + strings.Repeat("*", 5000),
|
|
||||||
Output: [][]string{{strings.Repeat("@", 5000), strings.Repeat("*", 5000)}},
|
|
||||||
Comment: '#',
|
|
||||||
}, {
|
|
||||||
Name: "QuoteWithTrailingCRLF",
|
|
||||||
Input: "\"foo\"bar\"\r\n",
|
|
||||||
Error: &ParseError{StartLine: 1, Line: 1, Column: 4, Err: ErrQuote},
|
|
||||||
}, {
|
|
||||||
Name: "LazyQuoteWithTrailingCRLF",
|
|
||||||
Input: "\"foo\"bar\"\r\n",
|
|
||||||
Output: [][]string{{`foo"bar`}},
|
|
||||||
LazyQuotes: true,
|
|
||||||
}, {
|
|
||||||
Name: "DoubleQuoteWithTrailingCRLF",
|
|
||||||
Input: "\"foo\"\"bar\"\r\n",
|
|
||||||
Output: [][]string{{`foo"bar`}},
|
|
||||||
}, {
|
|
||||||
Name: "EvenQuotes",
|
|
||||||
Input: `""""""""`,
|
|
||||||
Output: [][]string{{`"""`}},
|
|
||||||
}, {
|
|
||||||
Name: "OddQuotes",
|
|
||||||
Input: `"""""""`,
|
|
||||||
Error: &ParseError{StartLine: 1, Line: 1, Column: 7, Err: ErrQuote},
|
|
||||||
}, {
|
|
||||||
Name: "LazyOddQuotes",
|
|
||||||
Input: `"""""""`,
|
|
||||||
Output: [][]string{{`"""`}},
|
|
||||||
LazyQuotes: true,
|
|
||||||
}, {
|
|
||||||
Name: "BadComma1",
|
|
||||||
Comma: '\n',
|
|
||||||
Error: errInvalidDelim,
|
|
||||||
}, {
|
|
||||||
Name: "BadComma2",
|
|
||||||
Comma: '\r',
|
|
||||||
Error: errInvalidDelim,
|
|
||||||
}, {
|
|
||||||
Name: "BadComma3",
|
|
||||||
Comma: '"',
|
|
||||||
Error: errInvalidDelim,
|
|
||||||
}, {
|
|
||||||
Name: "BadComma4",
|
|
||||||
Comma: utf8.RuneError,
|
|
||||||
Error: errInvalidDelim,
|
|
||||||
}, {
|
|
||||||
Name: "BadComment1",
|
|
||||||
Comment: '\n',
|
|
||||||
Error: errInvalidDelim,
|
|
||||||
}, {
|
|
||||||
Name: "BadComment2",
|
|
||||||
Comment: '\r',
|
|
||||||
Error: errInvalidDelim,
|
|
||||||
}, {
|
|
||||||
Name: "BadComment3",
|
|
||||||
Comment: utf8.RuneError,
|
|
||||||
Error: errInvalidDelim,
|
|
||||||
}, {
|
|
||||||
Name: "BadCommaComment",
|
|
||||||
Comma: 'X',
|
|
||||||
Comment: 'X',
|
|
||||||
Error: errInvalidDelim,
|
|
||||||
}}
|
|
||||||
|
|
||||||
for _, tt := range tests {
|
|
||||||
t.Run(tt.Name, func(t *testing.T) {
|
|
||||||
r := NewReader(strings.NewReader(tt.Input))
|
|
||||||
|
|
||||||
if tt.Comma != 0 {
|
|
||||||
r.Comma = tt.Comma
|
|
||||||
}
|
|
||||||
r.Comment = tt.Comment
|
|
||||||
if tt.UseFieldsPerRecord {
|
|
||||||
r.FieldsPerRecord = tt.FieldsPerRecord
|
|
||||||
} else {
|
|
||||||
r.FieldsPerRecord = -1
|
|
||||||
}
|
|
||||||
r.LazyQuotes = tt.LazyQuotes
|
|
||||||
r.TrimLeadingSpace = tt.TrimLeadingSpace
|
|
||||||
r.ReuseRecord = tt.ReuseRecord
|
|
||||||
|
|
||||||
out, err := r.ReadAll()
|
|
||||||
if !reflect.DeepEqual(err, tt.Error) {
|
|
||||||
t.Errorf("ReadAll() error:\ngot %v\nwant %v", err, tt.Error)
|
|
||||||
} else if !reflect.DeepEqual(out, tt.Output) {
|
|
||||||
t.Errorf("ReadAll() output:\ngot %q\nwant %q", out, tt.Output)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// nTimes is an io.Reader which yields the string s n times.
|
|
||||||
type nTimes struct {
|
|
||||||
s string
|
|
||||||
n int
|
|
||||||
off int
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *nTimes) Read(p []byte) (n int, err error) {
|
|
||||||
for {
|
|
||||||
if r.n <= 0 || r.s == "" {
|
|
||||||
return n, io.EOF
|
|
||||||
}
|
|
||||||
n0 := copy(p, r.s[r.off:])
|
|
||||||
p = p[n0:]
|
|
||||||
n += n0
|
|
||||||
r.off += n0
|
|
||||||
if r.off == len(r.s) {
|
|
||||||
r.off = 0
|
|
||||||
r.n--
|
|
||||||
}
|
|
||||||
if len(p) == 0 {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// benchmarkRead measures reading the provided CSV rows data.
|
|
||||||
// initReader, if non-nil, modifies the Reader before it's used.
|
|
||||||
func benchmarkRead(b *testing.B, initReader func(*Reader), rows string) {
|
|
||||||
b.ReportAllocs()
|
|
||||||
r := NewReader(&nTimes{s: rows, n: b.N})
|
|
||||||
if initReader != nil {
|
|
||||||
initReader(r)
|
|
||||||
}
|
|
||||||
for {
|
|
||||||
_, err := r.Read()
|
|
||||||
if err == io.EOF {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
b.Fatal(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const benchmarkCSVData = `x,y,z,w
|
|
||||||
x,y,z,
|
|
||||||
x,y,,
|
|
||||||
x,,,
|
|
||||||
,,,
|
|
||||||
"x","y","z","w"
|
|
||||||
"x","y","z",""
|
|
||||||
"x","y","",""
|
|
||||||
"x","","",""
|
|
||||||
"","","",""
|
|
||||||
`
|
|
||||||
|
|
||||||
func BenchmarkRead(b *testing.B) {
|
|
||||||
benchmarkRead(b, nil, benchmarkCSVData)
|
|
||||||
}
|
|
||||||
|
|
||||||
func BenchmarkReadWithFieldsPerRecord(b *testing.B) {
|
|
||||||
benchmarkRead(b, func(r *Reader) { r.FieldsPerRecord = 4 }, benchmarkCSVData)
|
|
||||||
}
|
|
||||||
|
|
||||||
func BenchmarkReadWithoutFieldsPerRecord(b *testing.B) {
|
|
||||||
benchmarkRead(b, func(r *Reader) { r.FieldsPerRecord = -1 }, benchmarkCSVData)
|
|
||||||
}
|
|
||||||
|
|
||||||
func BenchmarkReadLargeFields(b *testing.B) {
|
|
||||||
benchmarkRead(b, nil, strings.Repeat(`xxxxxxxxxxxxxxxx,yyyyyyyyyyyyyyyy,zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww,vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
|
|
||||||
xxxxxxxxxxxxxxxxxxxxxxxx,yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy,zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww,vvvv
|
|
||||||
,,zzzz,wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww,vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
|
|
||||||
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy,zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww,vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
|
|
||||||
`, 3))
|
|
||||||
}
|
|
||||||
|
|
||||||
func BenchmarkReadReuseRecord(b *testing.B) {
|
|
||||||
benchmarkRead(b, func(r *Reader) { r.ReuseRecord = true }, benchmarkCSVData)
|
|
||||||
}
|
|
||||||
|
|
||||||
func BenchmarkReadReuseRecordWithFieldsPerRecord(b *testing.B) {
|
|
||||||
benchmarkRead(b, func(r *Reader) { r.ReuseRecord = true; r.FieldsPerRecord = 4 }, benchmarkCSVData)
|
|
||||||
}
|
|
||||||
|
|
||||||
func BenchmarkReadReuseRecordWithoutFieldsPerRecord(b *testing.B) {
|
|
||||||
benchmarkRead(b, func(r *Reader) { r.ReuseRecord = true; r.FieldsPerRecord = -1 }, benchmarkCSVData)
|
|
||||||
}
|
|
||||||
|
|
||||||
func BenchmarkReadReuseRecordLargeFields(b *testing.B) {
|
|
||||||
benchmarkRead(b, func(r *Reader) { r.ReuseRecord = true }, strings.Repeat(`xxxxxxxxxxxxxxxx,yyyyyyyyyyyyyyyy,zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww,vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
|
|
||||||
xxxxxxxxxxxxxxxxxxxxxxxx,yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy,zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww,vvvv
|
|
||||||
,,zzzz,wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww,vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
|
|
||||||
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy,zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww,vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
|
|
||||||
`, 3))
|
|
||||||
}
|
|
@ -1,179 +0,0 @@
|
|||||||
// Copyright 2011 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in https://golang.org/LICENSE
|
|
||||||
|
|
||||||
package csv
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bufio"
|
|
||||||
"io"
|
|
||||||
"strings"
|
|
||||||
"unicode"
|
|
||||||
"unicode/utf8"
|
|
||||||
)
|
|
||||||
|
|
||||||
// A Writer writes records using CSV encoding.
//
// As returned by NewWriter, a Writer writes records terminated by a
// newline and uses ',' as the field delimiter. The exported fields can be
// changed to customize the details before the first call to Write or WriteAll.
//
// Comma is the field delimiter.
//
// If UseCRLF is true, the Writer ends each output line with \r\n instead of \n.
//
// The writes of individual records are buffered.
// After all data has been written, the client should call the
// Flush method to guarantee all data has been forwarded to
// the underlying io.Writer. Any errors that occurred should
// be checked by calling the Error method.
type Writer struct {
	Comma       rune // Field delimiter (set to ',' by NewWriter)
	Quote       rune // Quote character used to enclose fields
	QuoteEscape rune // Rune used to escape the quote character inside quoted fields
	AlwaysQuote bool // True to quote all fields
	UseCRLF     bool // True to use \r\n as the line terminator
	w           *bufio.Writer
}

// NewWriter returns a new Writer that writes to w.
func NewWriter(w io.Writer) *Writer {
	return &Writer{
		Comma:       ',',
		Quote:       '"',
		QuoteEscape: '"',
		w:           bufio.NewWriter(w),
	}
}
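
// Illustrative sketch (not part of the original file): forcing quotes with the
// AlwaysQuote field even for fields that would not otherwise need them. The
// buffer and values are hypothetical.
//
//	var buf bytes.Buffer
//	w := NewWriter(&buf)
//	w.AlwaysQuote = true
//	_ = w.Write([]string{"a", "b,c"})
//	w.Flush()
//	// buf.String() == "\"a\",\"b,c\"\n"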
|
|
||||||
|
|
||||||
// Write writes a single CSV record to w along with any necessary quoting.
|
|
||||||
// A record is a slice of strings with each string being one field.
|
|
||||||
// Writes are buffered, so Flush must eventually be called to ensure
|
|
||||||
// that the record is written to the underlying io.Writer.
|
|
||||||
func (w *Writer) Write(record []string) error {
|
|
||||||
if !validDelim(w.Comma) {
|
|
||||||
return errInvalidDelim
|
|
||||||
}
|
|
||||||
|
|
||||||
for n, field := range record {
|
|
||||||
if n > 0 {
|
|
||||||
if _, err := w.w.WriteRune(w.Comma); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If we don't have to have a quoted field then just
|
|
||||||
// write out the field and continue to the next field.
|
|
||||||
if !w.AlwaysQuote && !w.fieldNeedsQuotes(field) {
|
|
||||||
if _, err := w.w.WriteString(field); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, err := w.w.WriteRune(w.Quote); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
specialChars := "\r\n" + string(w.Quote)
|
|
||||||
|
|
||||||
for len(field) > 0 {
|
|
||||||
// Search for special characters.
|
|
||||||
i := strings.IndexAny(field, specialChars)
|
|
||||||
if i < 0 {
|
|
||||||
i = len(field)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Copy verbatim everything before the special character.
|
|
||||||
if _, err := w.w.WriteString(field[:i]); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
field = field[i:]
|
|
||||||
|
|
||||||
// Encode the special character.
|
|
||||||
if len(field) > 0 {
|
|
||||||
var err error
|
|
||||||
switch nextRune([]byte(field)) {
|
|
||||||
case w.Quote:
|
|
||||||
_, err = w.w.WriteRune(w.QuoteEscape)
|
|
||||||
if err != nil {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
_, err = w.w.WriteRune(w.Quote)
|
|
||||||
case '\r':
|
|
||||||
if !w.UseCRLF {
|
|
||||||
err = w.w.WriteByte('\r')
|
|
||||||
}
|
|
||||||
case '\n':
|
|
||||||
if w.UseCRLF {
|
|
||||||
_, err = w.w.WriteString("\r\n")
|
|
||||||
} else {
|
|
||||||
err = w.w.WriteByte('\n')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
field = field[1:]
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if _, err := w.w.WriteRune(w.Quote); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
var err error
|
|
||||||
if w.UseCRLF {
|
|
||||||
_, err = w.w.WriteString("\r\n")
|
|
||||||
} else {
|
|
||||||
err = w.w.WriteByte('\n')
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Flush writes any buffered data to the underlying io.Writer.
|
|
||||||
// To check if an error occurred during the Flush, call Error.
|
|
||||||
func (w *Writer) Flush() {
|
|
||||||
w.w.Flush()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Error reports any error that has occurred during a previous Write or Flush.
|
|
||||||
func (w *Writer) Error() error {
|
|
||||||
_, err := w.w.Write(nil)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// WriteAll writes multiple CSV records to w using Write and then calls Flush,
|
|
||||||
// returning any error from the Flush.
|
|
||||||
func (w *Writer) WriteAll(records [][]string) error {
|
|
||||||
for _, record := range records {
|
|
||||||
err := w.Write(record)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return w.w.Flush()
|
|
||||||
}
|
|
||||||
|
|
||||||
// fieldNeedsQuotes reports whether our field must be enclosed in quotes.
|
|
||||||
// Fields with a Comma, fields with a quote or newline, and
|
|
||||||
// fields which start with a space must be enclosed in quotes.
|
|
||||||
// We used to quote empty strings, but we do not anymore (as of Go 1.4).
|
|
||||||
// The two representations should be equivalent, but Postgres distinguishes
|
|
||||||
// quoted vs non-quoted empty string during database imports, and it has
|
|
||||||
// an option to force the quoted behavior for non-quoted CSV but it has
|
|
||||||
// no option to force the non-quoted behavior for quoted CSV, making
|
|
||||||
// CSV with quoted empty strings strictly less useful.
|
|
||||||
// Not quoting the empty string also makes this package match the behavior
|
|
||||||
// of Microsoft Excel and Google Drive.
|
|
||||||
// For Postgres, quote the data terminating string `\.`.
|
|
||||||
func (w *Writer) fieldNeedsQuotes(field string) bool {
|
|
||||||
if field == "" {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
if field == `\.` || strings.ContainsAny(field, "\r\n"+string(w.Quote)+string(w.Comma)) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
r1, _ := utf8.DecodeRuneInString(field)
|
|
||||||
return unicode.IsSpace(r1)
|
|
||||||
}
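
// Illustrative sketch (not part of the original file): how the rules above
// classify a few hypothetical fields with the defaults from NewWriter.
//
//	w := NewWriter(io.Discard)
//	w.fieldNeedsQuotes("abc") // false
//	w.fieldNeedsQuotes("a,b") // true  (contains Comma)
//	w.fieldNeedsQuotes(" a")  // true  (leading space)
//	w.fieldNeedsQuotes(`\.`)  // true  (Postgres end-of-data marker)
//	w.fieldNeedsQuotes("")    // false (empty fields stay unquoted)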
|
|
@ -1,102 +0,0 @@
|
|||||||
// Copyright 2011 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in https://golang.org/LICENSE
|
|
||||||
|
|
||||||
package csv
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"errors"
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
var writeTests = []struct {
|
|
||||||
Input [][]string
|
|
||||||
Output string
|
|
||||||
Error error
|
|
||||||
UseCRLF bool
|
|
||||||
Comma rune
|
|
||||||
Quote rune
|
|
||||||
AlwaysQuote bool
|
|
||||||
}{
|
|
||||||
{Input: [][]string{{"abc"}}, Output: "abc\n"},
|
|
||||||
{Input: [][]string{{"abc"}}, Output: "abc\r\n", UseCRLF: true},
|
|
||||||
{Input: [][]string{{`"abc"`}}, Output: `"""abc"""` + "\n"},
|
|
||||||
{Input: [][]string{{`a"b`}}, Output: `"a""b"` + "\n"},
|
|
||||||
{Input: [][]string{{`"a"b"`}}, Output: `"""a""b"""` + "\n"},
|
|
||||||
{Input: [][]string{{" abc"}}, Output: `" abc"` + "\n"},
|
|
||||||
{Input: [][]string{{"abc,def"}}, Output: `"abc,def"` + "\n"},
|
|
||||||
{Input: [][]string{{"abc", "def"}}, Output: "abc,def\n"},
|
|
||||||
{Input: [][]string{{"abc"}, {"def"}}, Output: "abc\ndef\n"},
|
|
||||||
{Input: [][]string{{"abc\ndef"}}, Output: "\"abc\ndef\"\n"},
|
|
||||||
{Input: [][]string{{"abc\ndef"}}, Output: "\"abc\r\ndef\"\r\n", UseCRLF: true},
|
|
||||||
{Input: [][]string{{"abc\rdef"}}, Output: "\"abcdef\"\r\n", UseCRLF: true},
|
|
||||||
{Input: [][]string{{"abc\rdef"}}, Output: "\"abc\rdef\"\n", UseCRLF: false},
|
|
||||||
{Input: [][]string{{""}}, Output: "\n"},
|
|
||||||
{Input: [][]string{{"", ""}}, Output: ",\n"},
|
|
||||||
{Input: [][]string{{"", "", ""}}, Output: ",,\n"},
|
|
||||||
{Input: [][]string{{"", "", "a"}}, Output: ",,a\n"},
|
|
||||||
{Input: [][]string{{"", "a", ""}}, Output: ",a,\n"},
|
|
||||||
{Input: [][]string{{"", "a", "a"}}, Output: ",a,a\n"},
|
|
||||||
{Input: [][]string{{"a", "", ""}}, Output: "a,,\n"},
|
|
||||||
{Input: [][]string{{"a", "", "a"}}, Output: "a,,a\n"},
|
|
||||||
{Input: [][]string{{"a", "a", ""}}, Output: "a,a,\n"},
|
|
||||||
{Input: [][]string{{"a", "a", "a"}}, Output: "a,a,a\n"},
|
|
||||||
{Input: [][]string{{`\.`}}, Output: "\"\\.\"\n"},
|
|
||||||
{Input: [][]string{{"x09\x41\xb4\x1c", "aktau"}}, Output: "x09\x41\xb4\x1c,aktau\n"},
|
|
||||||
{Input: [][]string{{",x09\x41\xb4\x1c", "aktau"}}, Output: "\",x09\x41\xb4\x1c\",aktau\n"},
|
|
||||||
{Input: [][]string{{"a", "a", ""}}, Output: "a|a|\n", Comma: '|'},
|
|
||||||
{Input: [][]string{{",", ",", ""}}, Output: ",|,|\n", Comma: '|'},
|
|
||||||
{Input: [][]string{{"foo"}}, Comma: '"', Error: errInvalidDelim},
|
|
||||||
{Input: [][]string{{"a", "a", ""}}, Quote: '"', AlwaysQuote: true, Output: "\"a\"|\"a\"|\"\"\n", Comma: '|'},
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestWrite(t *testing.T) {
|
|
||||||
for n, tt := range writeTests {
|
|
||||||
b := &bytes.Buffer{}
|
|
||||||
f := NewWriter(b)
|
|
||||||
f.UseCRLF = tt.UseCRLF
|
|
||||||
if tt.Comma != 0 {
|
|
||||||
f.Comma = tt.Comma
|
|
||||||
}
|
|
||||||
if tt.Quote != 0 {
|
|
||||||
f.Quote = tt.Quote
|
|
||||||
}
|
|
||||||
f.AlwaysQuote = tt.AlwaysQuote
|
|
||||||
err := f.WriteAll(tt.Input)
|
|
||||||
if err != tt.Error {
|
|
||||||
t.Errorf("Unexpected error:\ngot %v\nwant %v", err, tt.Error)
|
|
||||||
}
|
|
||||||
out := b.String()
|
|
||||||
if out != tt.Output {
|
|
||||||
t.Errorf("#%d: out=%q want %q", n, out, tt.Output)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type errorWriter struct{}
|
|
||||||
|
|
||||||
func (e errorWriter) Write(b []byte) (int, error) {
|
|
||||||
return 0, errors.New("Test")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestError(t *testing.T) {
|
|
||||||
b := &bytes.Buffer{}
|
|
||||||
f := NewWriter(b)
|
|
||||||
f.Write([]string{"abc"})
|
|
||||||
f.Flush()
|
|
||||||
err := f.Error()
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
t.Errorf("Unexpected error: %s\n", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
f = NewWriter(errorWriter{})
|
|
||||||
f.Write([]string{"abc"})
|
|
||||||
f.Flush()
|
|
||||||
err = f.Error()
|
|
||||||
|
|
||||||
if err == nil {
|
|
||||||
t.Error("Error should not be nil")
|
|
||||||
}
|
|
||||||
}
|
|
2	go.mod
@@ -48,7 +48,9 @@ require (
 	github.com/mattn/go-ieproxy v0.0.1 // indirect
 	github.com/mattn/go-isatty v0.0.12
 	github.com/miekg/dns v1.1.35
+	github.com/minio/argon2 v0.0.0-20210427164258-0025d10c2c04
 	github.com/minio/cli v1.22.0
+	github.com/minio/csvparser v0.0.0-20210427163918-ee4f0ffa388d
 	github.com/minio/highwayhash v1.0.2
 	github.com/minio/md5-simd v1.1.1 // indirect
 	github.com/minio/minio-go/v7 v7.0.11-0.20210302210017-6ae69c73ce78
4	go.sum
@@ -430,8 +430,12 @@ github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5
 github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
 github.com/miekg/dns v1.1.35 h1:oTfOaDH+mZkdcgdIjH6yBajRGtIwcwcaR+rt23ZSrJs=
 github.com/miekg/dns v1.1.35/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM=
+github.com/minio/argon2 v0.0.0-20210427164258-0025d10c2c04 h1:4mrboT3YeVBHjAFia8IzmEUnzLRkemKeR0rjQ6+M9B4=
+github.com/minio/argon2 v0.0.0-20210427164258-0025d10c2c04/go.mod h1:XtOGJ7MjwUJDPtCqqrisx5QwVB/jDx+adQHigJVsQHQ=
 github.com/minio/cli v1.22.0 h1:VTQm7lmXm3quxO917X3p+el1l0Ca5X3S4PM2ruUYO68=
 github.com/minio/cli v1.22.0/go.mod h1:bYxnK0uS629N3Bq+AOZZ+6lwF77Sodk4+UL9vNuXhOY=
+github.com/minio/csvparser v0.0.0-20210427163918-ee4f0ffa388d h1:Q/oXgmaJluRgq4zIht5CbAOJphFbiG+wsQcR5WmDVYM=
+github.com/minio/csvparser v0.0.0-20210427163918-ee4f0ffa388d/go.mod h1:lKXskSLzPgC5WQyzP7maKH7Sl1cqvANXo9YCto8zbtM=
 github.com/minio/highwayhash v1.0.2 h1:Aak5U0nElisjDCfPSG79Tgzkn2gl66NxOMspRrKnA/g=
 github.com/minio/highwayhash v1.0.2/go.mod h1:BQskDq+xkJ12lmlUUi7U0M5Swg3EWR+dLTk+kldvVxY=
 github.com/minio/md5-simd v1.1.0/go.mod h1:XpBqgZULrMYD3R+M28PcmP0CkI7PEMzB3U77ZrKZ0Gw=
@@ -24,7 +24,7 @@ import (
 	"io"
 	"io/ioutil"
 
-	"github.com/minio/minio/contrib/pkg/argon2"
+	"github.com/minio/argon2"
 	"github.com/minio/minio/pkg/fips"
 	"github.com/secure-io/sio-go"
 	"github.com/secure-io/sio-go/sioutil"
@@ -26,7 +26,7 @@ import (
 	"sync"
 	"unicode/utf8"
 
-	csv "github.com/minio/minio/contrib/pkg/csvparser"
+	csv "github.com/minio/csvparser"
 	"github.com/minio/minio/pkg/s3select/sql"
 )
 
@@ -24,7 +24,7 @@ import (
 	"io"
 
 	"github.com/bcicen/jstream"
-	csv "github.com/minio/minio/contrib/pkg/csvparser"
+	csv "github.com/minio/csvparser"
 	"github.com/minio/minio/pkg/s3select/sql"
 )
 
@@ -27,7 +27,7 @@ import (
 	"strings"
 
 	"github.com/bcicen/jstream"
-	csv "github.com/minio/minio/contrib/pkg/csvparser"
+	csv "github.com/minio/csvparser"
 	"github.com/minio/minio/pkg/s3select/sql"
 )
 
@@ -22,7 +22,7 @@ import (
 	"io"
 
 	"github.com/bcicen/jstream"
-	csv "github.com/minio/minio/contrib/pkg/csvparser"
+	csv "github.com/minio/csvparser"
 	"github.com/minio/minio/pkg/s3select/json"
 	"github.com/minio/minio/pkg/s3select/sql"
 	"github.com/minio/simdjson-go"