mirror of https://github.com/minio/minio.git

commit 293e438830
Merge pull request #341 from abperiasamy/janitor
@@ -30,13 +30,13 @@ var _ = Suite(&MySuite{})
 func Test(t *testing.T) { TestingT(t) }
 
 func (s *MySuite) TestCauchyDecode(c *C) {
-    ep, _ := ParseEncoderParams(10, 5, Cauchy)
+    const k, m = 10, 5
+    ep, _ := ParseEncoderParams(k, m, Cauchy)
 
     data := []byte("Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.")
 
     e := NewEncoder(ep)
-    chunks, length := e.Encode(data)
-    c.Assert(length, Equals, len(data))
+    chunks, _ := e.Encode(data)
 
     chunks[0] = nil
     chunks[3] = nil
@@ -44,7 +44,7 @@ func (s *MySuite) TestCauchyDecode(c *C) {
     chunks[9] = nil
     chunks[13] = nil
 
-    recoveredData, err := e.Decode(chunks, length)
+    recoveredData, err := e.Decode(chunks, len(data))
     c.Assert(err, IsNil)
 
     if !bytes.Equal(data, recoveredData) {

@@ -39,10 +39,10 @@ func (e *Encoder) Decode(chunks [][]byte, length int) ([]byte, error) {
 
     k := int(e.k)
     n := int(e.k + e.m)
-    if len(chunks) != n {
+    if len(chunks) != int(n) {
         return nil, errors.New(fmt.Sprintf("chunks length must be %d", n))
     }
-    chunk_size := getChunkSize(k, length)
+    chunk_size := GetEncodedChunkLen(length, k)
 
     error_index := make([]int, n+1)
     var err_count int = 0
@@ -58,7 +58,7 @@ func (e *Encoder) Decode(chunks [][]byte, length int) ([]byte, error) {
     err_count++
 
     // Too many missing chunks, cannot be more than parity `m`
-    if err_count-1 > (n - k) {
+    if err_count-1 > int(n-k) {
         return nil, errors.New("too many erasures requested, can't decode")
     }
 
@@ -90,8 +90,8 @@ func (e *Encoder) Decode(chunks [][]byte, length int) ([]byte, error) {
     C.ec_encode_data(C.int(chunk_size), e.k, C.int(err_count-1), decode_tbls,
         source, target)
 
-    recovered_output := make([]byte, 0, chunk_size*k)
-    for i := 0; i < k; i++ {
+    recovered_output := make([]byte, 0, chunk_size*int(k))
+    for i := 0; i < int(k); i++ {
         recovered_output = append(recovered_output, chunks[i]...)
     }
 
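For illustration only, not part of the commit: the tests mark erased chunks by setting entries of the chunks slice to nil, and the err_count-1 > (n - k) guard above makes Decode refuse to proceed once more than m (the parity count) chunks are missing. The standalone sketch below mirrors that bound with the k=10, m=5 parameters used in the tests; canRecover and the placeholder payloads are invented names for this example.

    package main

    import "fmt"

    // canRecover mirrors Decode's erasure bound: recovery is possible only
    // while at most m of the k+m chunks are missing (passed as nil).
    func canRecover(chunks [][]byte, m int) bool {
        missing := 0
        for _, c := range chunks {
            if c == nil {
                missing++
            }
        }
        return missing <= m
    }

    func main() {
        const k, m = 10, 5
        chunks := make([][]byte, k+m)
        for i := range chunks {
            chunks[i] = []byte{0} // placeholder payloads
        }
        // Erase the same chunks the tests do.
        for _, i := range []int{0, 3, 9, 13} {
            chunks[i] = nil
        }
        fmt.Println(canRecover(chunks, m)) // true: 4 missing, within the parity budget
        chunks[1], chunks[2] = nil, nil
        fmt.Println(canRecover(chunks, m)) // false: 6 missing exceeds m=5
    }
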
@@ -116,38 +116,44 @@ func NewEncoder(ep *EncoderParams) *Encoder {
     }
 }
 
-func getChunkSize(k, split_len int) int {
-    var alignment, remainder, padded_len int
+func GetEncodedLen(inputLen, k, m int) (outputLen int) {
+    outputLen = GetEncodedChunkLen(inputLen, k) * (k + m)
+    return outputLen
+}
 
-    alignment = k * SimdAlign
-    remainder = split_len % alignment
+func GetEncodedChunkLen(inputLen, k int) (outputChunkLen int) {
+    alignment := k * SimdAlign
+    remainder := inputLen % alignment
 
-    padded_len = split_len
+    paddedInputLen := inputLen
     if remainder != 0 {
-        padded_len = split_len + (alignment - remainder)
+        paddedInputLen = inputLen + (alignment - remainder)
     }
-    return padded_len / k
+    outputChunkLen = paddedInputLen / k
+    return outputChunkLen
 }
 
 // Encode encodes a block of data. The input is the original data. The output
 // is a 2 tuple containing (k + m) chunks of erasure encoded data and the
 // length of the original object.
-func (e *Encoder) Encode(block []byte) ([][]byte, int) {
-    var block_len = len(block)
+func (e *Encoder) Encode(input []byte) ([][]byte, error) {
+    var inputLen = len(input)
 
-    chunk_size := getChunkSize(int(e.k), block_len)
-    chunk_len := chunk_size * int(e.k)
-    pad_len := int(chunk_len) - block_len
+    chunkLen := GetEncodedChunkLen(inputLen, int(e.k))
+    encodedDataLen := chunkLen * int(e.k)
+    paddedDataLen := int(encodedDataLen) - inputLen
 
-    if pad_len > 0 {
-        s := make([]byte, pad_len)
+    if paddedDataLen > 0 {
+        s := make([]byte, paddedDataLen)
         // Expand with new padded blocks to the byte array
-        block = append(block, s...)
+        input = append(input, s...)
     }
 
-    coded_len := chunk_size * int(e.p.M)
-    c := make([]byte, coded_len)
-    block = append(block, c...)
+    encodedParityLen := chunkLen * int(e.p.M)
+    c := make([]byte, encodedParityLen)
+    input = append(input, c...)
 
+    // encodedOutLen := encodedDataLen + encodedParityLen
+
     // Allocate chunks
     chunks := make([][]byte, e.p.K+e.p.M)
 
@@ -156,19 +162,19 @@ func (e *Encoder) Encode(block []byte) ([][]byte, int) {
     var i int
     // Add data blocks to chunks
     for i = 0; i < int(e.p.K); i++ {
-        chunks[i] = block[i*chunk_size : (i+1)*chunk_size]
+        chunks[i] = input[i*chunkLen : (i+1)*chunkLen]
         pointers[i] = &chunks[i][0]
     }
 
     for i = int(e.p.K); i < int(e.p.K+e.p.M); i++ {
-        chunks[i] = make([]byte, chunk_size)
+        chunks[i] = make([]byte, chunkLen)
         pointers[i] = &chunks[i][0]
     }
 
     data := (**C.uint8_t)(unsafe.Pointer(&pointers[:e.p.K][0]))
     coding := (**C.uint8_t)(unsafe.Pointer(&pointers[e.p.K:][0]))
 
-    C.ec_encode_data(C.int(chunk_size), e.k, e.m, e.encode_tbls, data,
+    C.ec_encode_data(C.int(chunkLen), e.k, e.m, e.encode_tbls, data,
         coding)
-    return chunks, block_len
+    return chunks, nil
 }

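For illustration only, not part of the commit: the new GetEncodedChunkLen pads the input length up to the next multiple of k * SimdAlign and splits it evenly across the k data chunks, and GetEncodedLen multiplies that chunk length by k + m for the total encoded size. The standalone sketch below reimplements that arithmetic; the simdAlign value of 32 is an assumed stand-in for the package's SimdAlign constant, and the input length is arbitrary.

    package main

    import "fmt"

    // simdAlign is an assumed stand-in for the package's SimdAlign constant.
    const simdAlign = 32

    // encodedChunkLen mirrors the arithmetic of GetEncodedChunkLen: pad
    // inputLen up to a multiple of k*simdAlign, then divide by k.
    func encodedChunkLen(inputLen, k int) int {
        alignment := k * simdAlign
        remainder := inputLen % alignment

        paddedInputLen := inputLen
        if remainder != 0 {
            paddedInputLen = inputLen + (alignment - remainder)
        }
        return paddedInputLen / k
    }

    // encodedLen mirrors GetEncodedLen: total size across k data + m parity chunks.
    func encodedLen(inputLen, k, m int) int {
        return encodedChunkLen(inputLen, k) * (k + m)
    }

    func main() {
        const k, m = 10, 5
        inputLen := 574 // arbitrary input length for the example

        // 574 is padded up to 640, the next multiple of 10*32, giving
        // 64-byte chunks and 15*64 = 960 bytes of encoded output.
        fmt.Println(encodedChunkLen(inputLen, k)) // 64
        fmt.Println(encodedLen(inputLen, k, m))   // 960
    }
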
@@ -28,10 +28,9 @@ func (s *MySuite) TestVanderMondeDecode(c *C) {
     data := []byte("Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.")
 
     e := NewEncoder(ep)
-    chunks, length := e.Encode(data)
+    chunks, err := e.Encode(data)
     c.Logf("chunks length: %d", len(chunks))
-    c.Logf("length: %d", length)
-    c.Assert(length, Equals, len(data))
+    c.Logf("length: %d", len(data))
 
     chunks[0] = nil
     chunks[3] = nil
@@ -39,7 +38,7 @@ func (s *MySuite) TestVanderMondeDecode(c *C) {
     chunks[9] = nil
     chunks[13] = nil
 
-    recoveredData, err := e.Decode(chunks, length)
+    recoveredData, err := e.Decode(chunks, len(data))
     c.Assert(err, IsNil)
 
     if !bytes.Equal(recoveredData, data) {
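For illustration only, not part of the commit: with this change Encode returns ([][]byte, error) instead of echoing back a length, so callers track the original size themselves and pass it to Decode, exactly as the updated tests do with len(data). A minimal round-trip sketch under those assumptions, written as if it lived in the encoder's own package; ParseEncoderParams is assumed to return the params and an error, matching the two-value assignment in the tests.

    // roundTrip erasure-codes data, drops two chunks, and reconstructs it.
    func roundTrip(data []byte) ([]byte, error) {
        // 10 data chunks and 5 parity chunks, as in the tests.
        ep, err := ParseEncoderParams(10, 5, Cauchy)
        if err != nil {
            return nil, err
        }
        e := NewEncoder(ep)

        // Encode no longer returns the padded length, only the chunks and an
        // error, so len(data) has to be remembered for Decode.
        chunks, err := e.Encode(data)
        if err != nil {
            return nil, err
        }

        // Up to 5 chunks (the parity count) may be lost; mark losses as nil.
        chunks[0], chunks[3] = nil, nil

        return e.Decode(chunks, len(data))
    }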