package donut

import (
	"bytes"
	"encoding/gob"
	"encoding/json"
	"errors"
	"io"
	"io/ioutil"
	"log"
	"os"
	"path"
	"sort"
	"strconv"
	"strings"
	"time"

	"github.com/minio-io/minio/pkg/encoding/erasure"
	"github.com/minio-io/minio/pkg/utils/split"
)
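
// donutDriver - in-memory implementation of the Donut interface; buckets are
// tracked in a map and objects are erasure-coded across storage nodes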
type donutDriver struct {
	buckets map[string]Bucket
	nodes   map[string]Node
}

// NewDonutDriver - instantiate a new donut driver rooted at the given path
func NewDonutDriver(root string) Donut {
	nodes := make(map[string]Node)
	nodes["localhost"] = localDirectoryNode{root: root}
	driver := donutDriver{
		buckets: make(map[string]Bucket),
		nodes:   nodes,
	}
	return driver
}
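
// CreateBucket - register a new bucket; for now every object slice is
// assigned to the local node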
func (driver donutDriver) CreateBucket(bucketName string) error {
	if _, ok := driver.buckets[bucketName]; !ok {
		bucketName = strings.TrimSpace(bucketName)
		if bucketName == "" {
			return errors.New("Cannot create bucket with no name")
		}
		// assign all slices to the local node
		// TODO assign other nodes
		nodes := make([]string, 16)
		for i := 0; i < 16; i++ {
			nodes[i] = "localhost"
		}
		bucket := bucketDriver{
			nodes: nodes,
		}
		driver.buckets[bucketName] = bucket
		return nil
	}
	return errors.New("Bucket exists")
}
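
// ListBuckets - return the bucket names known to this driver, sorted alphabetically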
func (driver donutDriver) ListBuckets() ([]string, error) {
	var buckets []string
	for bucket := range driver.buckets {
		buckets = append(buckets, bucket)
	}
	sort.Strings(buckets)
	return buckets, nil
}
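
// GetObjectWriter - open a writer on each node assigned to the bucket and
// wrap them in an erasure-coding ObjectWriter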
func (driver donutDriver) GetObjectWriter(bucketName, objectName string) (ObjectWriter, error) {
	if bucket, ok := driver.buckets[bucketName]; ok {
		writers := make([]Writer, 16)
		nodes, err := bucket.GetNodes()
		if err != nil {
			return nil, err
		}
		for i, nodeID := range nodes {
			if node, ok := driver.nodes[nodeID]; ok {
				writer, _ := node.GetWriter(bucketName+":0:"+strconv.Itoa(i), objectName)
				writers[i] = writer
			}
		}
		return newErasureWriter(writers), nil
	}
	return nil, errors.New("Bucket not found")
}
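
// GetObject - open a reader on each node and stream the erasure-decoded
// object through a pipe; the caller reads the reassembled data from the
// returned ReadCloser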
func (driver donutDriver) GetObject(bucketName, objectName string) (io.ReadCloser, error) {
	r, w := io.Pipe()
	if bucket, ok := driver.buckets[bucketName]; ok {
		readers := make([]io.ReadCloser, 16)
		nodes, err := bucket.GetNodes()
		if err != nil {
			return nil, err
		}
		var metadata map[string]string
		for i, nodeID := range nodes {
			if node, ok := driver.nodes[nodeID]; ok {
				bucketID := bucketName + ":0:" + strconv.Itoa(i)
				reader, err := node.GetReader(bucketID, objectName)
				if err != nil {
					return nil, err
				}
				readers[i] = reader
				if metadata == nil {
					metadata, err = node.GetDonutMetadata(bucketID, objectName)
					if err != nil {
						return nil, err
					}
				}
			}
		}
		go erasureReader(readers, metadata, w)
		return r, nil
	}
	return nil, errors.New("Bucket not found")
}
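
// erasureReader - for each chunk, gob-decode one encoded block per reader,
// erasure-decode the blocks back into plain data, and write it to the pipe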
func erasureReader(readers []io.ReadCloser, donutMetadata map[string]string, writer *io.PipeWriter) {
	totalChunks, _ := strconv.Atoi(donutMetadata["chunkCount"])
	totalLeft, _ := strconv.Atoi(donutMetadata["totalLength"])
	blockSize, _ := strconv.Atoi(donutMetadata["blockSize"])
	params, _ := erasure.ParseEncoderParams(8, 8, erasure.Cauchy)
	encoder := erasure.NewEncoder(params)
	for _, reader := range readers {
		defer reader.Close()
	}
	for chunk := 0; chunk < totalChunks; chunk++ {
		encodedBytes := make([][]byte, 16)
		for i, reader := range readers {
			var bytesArray []byte
			decoder := gob.NewDecoder(reader)
			err := decoder.Decode(&bytesArray)
			if err != nil {
				log.Println(err)
			}
			encodedBytes[i] = bytesArray
		}
		// the final chunk may be shorter than a full block
		curBlockSize := totalLeft
		if blockSize < totalLeft {
			curBlockSize = blockSize
		}
		log.Println("decoding block size", curBlockSize)
		decodedData, err := encoder.Decode(encodedBytes, curBlockSize)
		if err != nil {
			writer.CloseWithError(err)
			return
		}
		io.Copy(writer, bytes.NewBuffer(decodedData))
		totalLeft = totalLeft - blockSize
	}
	writer.Close()
}

// erasureWriter - fans a single object stream out to per-node writers,
// erasure-coding and gob-encoding each chunk in a background goroutine
type erasureWriter struct {
	writers       []Writer
	metadata      map[string]string
	donutMetadata map[string]string // not exposed
	erasureWriter *io.PipeWriter
	isClosed      <-chan bool
}
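
// newErasureWriter - construct an ObjectWriter that splits, encodes, and fans
// out writes to the supplied per-node writers in a background goroutine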
func newErasureWriter(writers []Writer) ObjectWriter {
	r, w := io.Pipe()
	isClosed := make(chan bool)
	writer := erasureWriter{
		writers:       writers,
		metadata:      make(map[string]string),
		erasureWriter: w,
		isClosed:      isClosed,
	}
	go erasureGoroutine(r, writer, isClosed)
	return writer
}
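
// erasureGoroutine - split the incoming stream into 10MiB chunks,
// erasure-encode each chunk, gob-encode every block to its node writer,
// then persist metadata and signal completion on isClosed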
func erasureGoroutine(r *io.PipeReader, eWriter erasureWriter, isClosed chan<- bool) {
	chunks := split.Stream(r, 10*1024*1024)
	params, _ := erasure.ParseEncoderParams(8, 8, erasure.Cauchy)
	encoder := erasure.NewEncoder(params)
	chunkCount := 0
	totalLength := 0
	for chunk := range chunks {
		if chunk.Err == nil {
			totalLength = totalLength + len(chunk.Data)
			encodedBlocks, _ := encoder.Encode(chunk.Data)
			for blockIndex, block := range encodedBlocks {
				var byteBuffer bytes.Buffer
				gobEncoder := gob.NewEncoder(&byteBuffer)
				gobEncoder.Encode(block)
				io.Copy(eWriter.writers[blockIndex], &byteBuffer)
			}
		}
		chunkCount = chunkCount + 1
	}
	metadata := make(map[string]string)
	metadata["blockSize"] = strconv.Itoa(10 * 1024 * 1024)
	metadata["chunkCount"] = strconv.Itoa(chunkCount)
	metadata["created"] = time.Now().Format(time.RFC3339Nano)
	metadata["erasureK"] = "8"
	metadata["erasureM"] = "8"
	metadata["erasureTechnique"] = "Cauchy"
	metadata["totalLength"] = strconv.Itoa(totalLength)
	for _, nodeWriter := range eWriter.writers {
		if nodeWriter != nil {
			nodeWriter.SetMetadata(eWriter.metadata)
			nodeWriter.SetDonutMetadata(metadata)
			nodeWriter.Close()
		}
	}
	isClosed <- true
}
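
// Write - feed data into the pipe that drives the erasure-encoding goroutine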
func (d erasureWriter) Write(data []byte) (int, error) {
	io.Copy(d.erasureWriter, bytes.NewBuffer(data))
	return len(data), nil
}
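
// Close - close the pipe and wait for the encoding goroutine to finish
// flushing writers and metadata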
func (d erasureWriter) Close() error {
	d.erasureWriter.Close()
	<-d.isClosed
	return nil
}
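
// CloseWithError - propagate the error to every underlying node writer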
func (d erasureWriter) CloseWithError(err error) error {
	for _, writer := range d.writers {
		if writer != nil {
			writer.CloseWithError(err)
		}
	}
	return nil
}

// SetMetadata - replace the object metadata that will be stored alongside the data
func (d erasureWriter) SetMetadata(metadata map[string]string) error {
	for k := range d.metadata {
		delete(d.metadata, k)
	}
	for k, v := range metadata {
		d.metadata[k] = v
	}
	return nil
}

// GetMetadata - return a copy of the object metadata
func (d erasureWriter) GetMetadata() (map[string]string, error) {
	metadata := make(map[string]string)
	for k, v := range d.metadata {
		metadata[k] = v
	}
	return metadata, nil
}
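
// localDirectoryNode - a storage node backed by a directory on the local filesystem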
type localDirectoryNode struct {
	root string
}

// GetBuckets - list buckets stored on this node (not yet implemented)
func (d localDirectoryNode) GetBuckets() ([]string, error) {
	return nil, errors.New("Not Implemented")
}

// GetWriter - create the object directory and return a file-backed Writer for it
func (d localDirectoryNode) GetWriter(bucket, object string) (Writer, error) {
	objectPath := path.Join(d.root, bucket, object)
	err := os.MkdirAll(objectPath, 0700)
	if err != nil {
		return nil, err
	}
	return newDonutFileWriter(objectPath)
}

// GetReader - open the data file for the given object
func (d localDirectoryNode) GetReader(bucket, object string) (io.ReadCloser, error) {
	return os.Open(path.Join(d.root, bucket, object, "data"))
}

// GetMetadata - read the object's user metadata from metadata.json
func (d localDirectoryNode) GetMetadata(bucket, object string) (map[string]string, error) {
	return d.getMetadata(bucket, object, "metadata.json")
}

// GetDonutMetadata - read the object's system metadata from donutMetadata.json
func (d localDirectoryNode) GetDonutMetadata(bucket, object string) (map[string]string, error) {
	return d.getMetadata(bucket, object, "donutMetadata.json")
}
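
// getMetadata - read and JSON-decode the named metadata file for an object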
func (d localDirectoryNode) getMetadata(bucket, object, fileName string) (map[string]string, error) {
	file, err := os.Open(path.Join(d.root, bucket, object, fileName))
	if err != nil {
		return nil, err
	}
	defer file.Close()
	metadata := make(map[string]string)
	decoder := json.NewDecoder(file)
	if err := decoder.Decode(&metadata); err != nil {
		return nil, err
	}
	return metadata, nil
}
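
// newDonutFileWriter - open the object's data file and return a Writer that
// persists its metadata files on Close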
func newDonutFileWriter(objectDir string) (Writer, error) {
	dataFile, err := os.OpenFile(path.Join(objectDir, "data"), os.O_WRONLY|os.O_CREATE, 0600)
	if err != nil {
		return nil, err
	}
	return donutFileWriter{
		root:          objectDir,
		file:          dataFile,
		metadata:      make(map[string]string),
		donutMetadata: make(map[string]string),
	}, nil
}
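
// donutFileWriter - writes one object slice to disk together with its
// metadata.json and donutMetadata.json files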
type donutFileWriter struct {
	root          string
	file          *os.File
	metadata      map[string]string
	donutMetadata map[string]string
	err           error
}

func (d donutFileWriter) Write(data []byte) (int, error) {
	return d.file.Write(data)
}
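
// Close - close the data file and write metadata.json and donutMetadata.json
// alongside it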
func (d donutFileWriter) Close() error {
	if d.err != nil {
		return d.err
	}
	d.file.Close()

	metadata, _ := json.Marshal(d.metadata)
	ioutil.WriteFile(path.Join(d.root, "metadata.json"), metadata, 0600)
	donutMetadata, _ := json.Marshal(d.donutMetadata)
	ioutil.WriteFile(path.Join(d.root, "donutMetadata.json"), donutMetadata, 0600)

	return nil
}
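
// CloseWithError - close the data file early in response to an error,
// skipping the metadata files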
func (d donutFileWriter) CloseWithError(err error) error {
	if d.err == nil {
		d.err = err
	}
	d.file.Close()
	return nil
}

// SetMetadata - replace the user metadata for this object slice
func (d donutFileWriter) SetMetadata(metadata map[string]string) error {
	for k := range d.metadata {
		delete(d.metadata, k)
	}
	for k, v := range metadata {
		d.metadata[k] = v
	}
	return nil
}

// GetMetadata - return a copy of the user metadata
func (d donutFileWriter) GetMetadata() (map[string]string, error) {
	metadata := make(map[string]string)
	for k, v := range d.metadata {
		metadata[k] = v
	}
	return metadata, nil
}

// SetDonutMetadata - replace the system (donut) metadata for this object slice
func (d donutFileWriter) SetDonutMetadata(metadata map[string]string) error {
	for k := range d.donutMetadata {
		delete(d.donutMetadata, k)
	}
	for k, v := range metadata {
		d.donutMetadata[k] = v
	}
	return nil
}

// GetDonutMetadata - return a copy of the system (donut) metadata
func (d donutFileWriter) GetDonutMetadata() (map[string]string, error) {
	donutMetadata := make(map[string]string)
	for k, v := range d.donutMetadata {
		donutMetadata[k] = v
	}
	return donutMetadata, nil
}