Merge pull request #341 from abperiasamy/janitor

commit 293e438830
Frederick F. Kautz IV committed 2015-03-22 17:34:25 -07:00
4 changed files with 40 additions and 35 deletions

View File

@@ -30,13 +30,13 @@ var _ = Suite(&MySuite{})
 func Test(t *testing.T) { TestingT(t) }
 
 func (s *MySuite) TestCauchyDecode(c *C) {
-    ep, _ := ParseEncoderParams(10, 5, Cauchy)
+    const k, m = 10, 5
+    ep, _ := ParseEncoderParams(k, m, Cauchy)
     data := []byte("Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.")
     e := NewEncoder(ep)
-    chunks, length := e.Encode(data)
-    c.Assert(length, Equals, len(data))
+    chunks, _ := e.Encode(data)
     chunks[0] = nil
     chunks[3] = nil
@@ -44,7 +44,7 @@ func (s *MySuite) TestCauchyDecode(c *C) {
     chunks[9] = nil
     chunks[13] = nil
-    recoveredData, err := e.Decode(chunks, length)
+    recoveredData, err := e.Decode(chunks, len(data))
     c.Assert(err, IsNil)
     if !bytes.Equal(data, recoveredData) {
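The length return value is gone from Encode, so the test now tracks len(data) itself and passes it to Decode. A minimal sketch of the updated call pattern, reusing the ParseEncoderParams, NewEncoder, Encode, and Decode identifiers from this diff and assuming ParseEncoderParams's second return value is an error, as the test's blank identifier suggests (not a complete program):

// Sketch only: mirrors the updated test flow.
ep, err := ParseEncoderParams(10, 5, Cauchy) // k = 10 data chunks, m = 5 parity chunks
if err != nil {
    // handle invalid parameters
}
e := NewEncoder(ep)

data := []byte("some payload")
chunks, err := e.Encode(data) // now ([][]byte, error); the length is no longer returned
if err != nil {
    // handle encode failure
}

chunks[0], chunks[3] = nil, nil // simulate lost chunks (at most m may be missing)

recovered, err := e.Decode(chunks, len(data)) // the caller supplies the original length
if err != nil {
    // handle decode failure
}
_ = recovered

Note that the Cauchy test above discards the Encode error with the blank identifier, while the Vandermonde test further down captures it as err.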

View File

@@ -39,10 +39,10 @@ func (e *Encoder) Decode(chunks [][]byte, length int) ([]byte, error) {
     k := int(e.k)
     n := int(e.k + e.m)
-    if len(chunks) != n {
+    if len(chunks) != int(n) {
         return nil, errors.New(fmt.Sprintf("chunks length must be %d", n))
     }
-    chunk_size := getChunkSize(k, length)
+    chunk_size := GetEncodedChunkLen(length, k)
     error_index := make([]int, n+1)
     var err_count int = 0
@@ -58,7 +58,7 @@ func (e *Encoder) Decode(chunks [][]byte, length int) ([]byte, error) {
         err_count++
         // Too many missing chunks, cannot be more than parity `m`
-        if err_count-1 > (n - k) {
+        if err_count-1 > int(n-k) {
             return nil, errors.New("too many erasures requested, can't decode")
         }
@@ -90,8 +90,8 @@ func (e *Encoder) Decode(chunks [][]byte, length int) ([]byte, error) {
     C.ec_encode_data(C.int(chunk_size), e.k, C.int(err_count-1), decode_tbls,
         source, target)
-    recovered_output := make([]byte, 0, chunk_size*k)
-    for i := 0; i < k; i++ {
+    recovered_output := make([]byte, 0, chunk_size*int(k))
+    for i := 0; i < int(k); i++ {
         recovered_output = append(recovered_output, chunks[i]...)
     }
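The comment in the middle hunk spells out the recovery bound: no more than m = n - k chunks may be missing. A small standalone sketch of that check, following the nil-chunk convention the tests use (the canDecode name is illustrative, not part of the package):

// canDecode reports whether a set of chunks is still recoverable:
// the slice must hold all k+m positions, and at most m of them may be nil.
func canDecode(chunks [][]byte, k, m int) bool {
    if len(chunks) != k+m {
        return false
    }
    missing := 0
    for _, c := range chunks {
        if c == nil {
            missing++
        }
    }
    return missing <= m
}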

View File

@@ -116,38 +116,44 @@ func NewEncoder(ep *EncoderParams) *Encoder {
     }
 }
 
-func getChunkSize(k, split_len int) int {
-    var alignment, remainder, padded_len int
-    alignment = k * SimdAlign
-    remainder = split_len % alignment
-    padded_len = split_len
+func GetEncodedLen(inputLen, k, m int) (outputLen int) {
+    outputLen = GetEncodedChunkLen(inputLen, k) * (k + m)
+    return outputLen
+}
+
+func GetEncodedChunkLen(inputLen, k int) (outputChunkLen int) {
+    alignment := k * SimdAlign
+    remainder := inputLen % alignment
+    paddedInputLen := inputLen
     if remainder != 0 {
-        padded_len = split_len + (alignment - remainder)
+        paddedInputLen = inputLen + (alignment - remainder)
     }
-    return padded_len / k
+    outputChunkLen = paddedInputLen / k
+    return outputChunkLen
 }
 
 // Encode encodes a block of data. The input is the original data. The output
 // is a 2 tuple containing (k + m) chunks of erasure encoded data and the
 // length of the original object.
-func (e *Encoder) Encode(block []byte) ([][]byte, int) {
-    var block_len = len(block)
-    chunk_size := getChunkSize(int(e.k), block_len)
-    chunk_len := chunk_size * int(e.k)
-    pad_len := int(chunk_len) - block_len
-    if pad_len > 0 {
-        s := make([]byte, pad_len)
+func (e *Encoder) Encode(input []byte) ([][]byte, error) {
+    var inputLen = len(input)
+    chunkLen := GetEncodedChunkLen(inputLen, int(e.k))
+    encodedDataLen := chunkLen * int(e.k)
+    paddedDataLen := int(encodedDataLen) - inputLen
+    if paddedDataLen > 0 {
+        s := make([]byte, paddedDataLen)
         // Expand with new padded blocks to the byte array
-        block = append(block, s...)
+        input = append(input, s...)
     }
-    coded_len := chunk_size * int(e.p.M)
-    c := make([]byte, coded_len)
-    block = append(block, c...)
+    encodedParityLen := chunkLen * int(e.p.M)
+    c := make([]byte, encodedParityLen)
+    input = append(input, c...)
+    // encodedOutLen := encodedDataLen + encodedParityLen
     // Allocate chunks
     chunks := make([][]byte, e.p.K+e.p.M)
@@ -156,19 +162,19 @@ func (e *Encoder) Encode(block []byte) ([][]byte, int) {
     var i int
     // Add data blocks to chunks
     for i = 0; i < int(e.p.K); i++ {
-        chunks[i] = block[i*chunk_size : (i+1)*chunk_size]
+        chunks[i] = input[i*chunkLen : (i+1)*chunkLen]
         pointers[i] = &chunks[i][0]
     }
     for i = int(e.p.K); i < int(e.p.K+e.p.M); i++ {
-        chunks[i] = make([]byte, chunk_size)
+        chunks[i] = make([]byte, chunkLen)
         pointers[i] = &chunks[i][0]
     }
     data := (**C.uint8_t)(unsafe.Pointer(&pointers[:e.p.K][0]))
     coding := (**C.uint8_t)(unsafe.Pointer(&pointers[e.p.K:][0]))
-    C.ec_encode_data(C.int(chunk_size), e.k, e.m, e.encode_tbls, data,
+    C.ec_encode_data(C.int(chunkLen), e.k, e.m, e.encode_tbls, data,
         coding)
-    return chunks, block_len
+    return chunks, nil
 }
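A quick worked example of the GetEncodedChunkLen / GetEncodedLen arithmetic introduced above, assuming SimdAlign is 32 (its value does not appear in this diff) and the k = 10, m = 5 parameters used by the tests; the constant and function names below are illustrative only:

package main

import "fmt"

// assumedSimdAlign stands in for the package's SimdAlign constant,
// which is assumed here (not shown in this diff) to be 32.
const assumedSimdAlign = 32

// exampleChunkLen mirrors GetEncodedChunkLen: pad the input up to a
// multiple of k*SimdAlign, then split it into k equal chunks.
func exampleChunkLen(inputLen, k int) int {
    alignment := k * assumedSimdAlign
    remainder := inputLen % alignment
    padded := inputLen
    if remainder != 0 {
        padded = inputLen + (alignment - remainder)
    }
    return padded / k
}

func main() {
    const k, m, inputLen = 10, 5, 600
    chunkLen := exampleChunkLen(inputLen, k)
    fmt.Println(chunkLen)           // 64: 600 bytes are padded to 640, then split 10 ways
    fmt.Println(chunkLen * (k + m)) // 960: the total GetEncodedLen would report
}

Encode pads the input the same way before slicing it into chunks, which is presumably why Decode still takes the original length: the padding has to be stripped again on the way back.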

View File

@@ -28,10 +28,9 @@ func (s *MySuite) TestVanderMondeDecode(c *C) {
     data := []byte("Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.")
     e := NewEncoder(ep)
-    chunks, length := e.Encode(data)
+    chunks, err := e.Encode(data)
     c.Logf("chunks length: %d", len(chunks))
-    c.Logf("length: %d", length)
-    c.Assert(length, Equals, len(data))
+    c.Logf("length: %d", len(data))
     chunks[0] = nil
     chunks[3] = nil
@@ -39,7 +38,7 @@ func (s *MySuite) TestVanderMondeDecode(c *C) {
     chunks[9] = nil
     chunks[13] = nil
-    recoveredData, err := e.Decode(chunks, length)
+    recoveredData, err := e.Decode(chunks, len(data))
     c.Assert(err, IsNil)
     if !bytes.Equal(recoveredData, data) {