/*
 * MinIO Cloud Storage, (C) 2018 MinIO, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package target

import (
	"context"
	"crypto/tls"
	"crypto/x509"
	"encoding/json"
	"errors"
	"net"
	"net/url"
	"os"
	"path/filepath"

	"github.com/minio/minio/pkg/event"
	xnet "github.com/minio/minio/pkg/net"

	sarama "github.com/Shopify/sarama"
	saramatls "github.com/Shopify/sarama/tools/tls"
)

// Kafka input constants
const (
	KafkaBrokers       = "brokers"
	KafkaTopic         = "topic"
	KafkaQueueDir      = "queue_dir"
	KafkaQueueLimit    = "queue_limit"
	KafkaTLS           = "tls"
	KafkaTLSSkipVerify = "tls_skip_verify"
	KafkaTLSClientAuth = "tls_client_auth"
	KafkaSASL          = "sasl"
	KafkaSASLUsername  = "sasl_username"
	KafkaSASLPassword  = "sasl_password"
	KafkaClientTLSCert = "client_tls_cert"
	KafkaClientTLSKey  = "client_tls_key"

	EnvKafkaEnable        = "MINIO_NOTIFY_KAFKA_ENABLE"
	EnvKafkaBrokers       = "MINIO_NOTIFY_KAFKA_BROKERS"
	EnvKafkaTopic         = "MINIO_NOTIFY_KAFKA_TOPIC"
	EnvKafkaQueueDir      = "MINIO_NOTIFY_KAFKA_QUEUE_DIR"
	EnvKafkaQueueLimit    = "MINIO_NOTIFY_KAFKA_QUEUE_LIMIT"
	EnvKafkaTLS           = "MINIO_NOTIFY_KAFKA_TLS"
	EnvKafkaTLSSkipVerify = "MINIO_NOTIFY_KAFKA_TLS_SKIP_VERIFY"
	EnvKafkaTLSClientAuth = "MINIO_NOTIFY_KAFKA_TLS_CLIENT_AUTH"
	EnvKafkaSASLEnable    = "MINIO_NOTIFY_KAFKA_SASL"
	EnvKafkaSASLUsername  = "MINIO_NOTIFY_KAFKA_SASL_USERNAME"
	EnvKafkaSASLPassword  = "MINIO_NOTIFY_KAFKA_SASL_PASSWORD"
	EnvKafkaClientTLSCert = "MINIO_NOTIFY_KAFKA_CLIENT_TLS_CERT"
	EnvKafkaClientTLSKey  = "MINIO_NOTIFY_KAFKA_CLIENT_TLS_KEY"
)
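
// Illustrative only (not part of the original file): a deployment might wire
// the environment keys above like so. The values and the comma-separated
// broker list format are assumptions, not confirmed by this file:
//
//	export MINIO_NOTIFY_KAFKA_ENABLE="on"
//	export MINIO_NOTIFY_KAFKA_BROKERS="kafka1:9092,kafka2:9092"
//	export MINIO_NOTIFY_KAFKA_TOPIC="bucketevents"
//	export MINIO_NOTIFY_KAFKA_QUEUE_DIR="/var/minio/events"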

// KafkaArgs - Kafka target arguments.
type KafkaArgs struct {
	Enable     bool        `json:"enable"`
	Brokers    []xnet.Host `json:"brokers"`
	Topic      string      `json:"topic"`
	QueueDir   string      `json:"queueDir"`
	QueueLimit uint64      `json:"queueLimit"`
	TLS        struct {
		Enable        bool               `json:"enable"`
		RootCAs       *x509.CertPool     `json:"-"`
		SkipVerify    bool               `json:"skipVerify"`
		ClientAuth    tls.ClientAuthType `json:"clientAuth"`
		ClientTLSCert string             `json:"clientTLSCert"`
		ClientTLSKey  string             `json:"clientTLSKey"`
	} `json:"tls"`
	SASL struct {
		Enable   bool   `json:"enable"`
		User     string `json:"username"`
		Password string `json:"password"`
	} `json:"sasl"`
}
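
// For illustration, the JSON tags above imply a serialized form roughly like
// the following (all values are hypothetical; xnet.Host is assumed to marshal
// as a "host:port" string and tls.ClientAuthType as an integer):
//
//	{
//	  "enable": true,
//	  "brokers": ["kafka1:9092"],
//	  "topic": "bucketevents",
//	  "queueDir": "/var/minio/events",
//	  "queueLimit": 1000,
//	  "tls": {"enable": false, "skipVerify": false, "clientAuth": 0, "clientTLSCert": "", "clientTLSKey": ""},
//	  "sasl": {"enable": false, "username": "", "password": ""}
//	}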

// Validate KafkaArgs fields
func (k KafkaArgs) Validate() error {
	if !k.Enable {
		return nil
	}
	if len(k.Brokers) == 0 {
		return errors.New("no broker address found")
	}
	for _, b := range k.Brokers {
		if _, err := xnet.ParseHost(b.String()); err != nil {
			return err
		}
	}
	if k.QueueDir != "" {
		if !filepath.IsAbs(k.QueueDir) {
			return errors.New("queueDir path should be absolute")
		}
	}
	if k.QueueLimit > 10000 {
		return errors.New("queueLimit should not exceed 10000")
	}
	return nil
}
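
// A minimal sketch of the validation behaviour above (hypothetical values):
//
//	args := KafkaArgs{Enable: true, QueueDir: "relative/path"}
//	err := args.Validate() // returns "no broker address found": Brokers must be non-empty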

// KafkaTarget - Kafka target.
type KafkaTarget struct {
	id       event.TargetID
	args     KafkaArgs
	producer sarama.SyncProducer
	config   *sarama.Config
	store    Store
}

// ID - returns target ID.
func (target *KafkaTarget) ID() event.TargetID {
	return target.id
}

// IsActive - returns true if the target is up and active.
func (target *KafkaTarget) IsActive() (bool, error) {
	if !target.args.pingBrokers() {
		return false, errNotConnected
	}
	return true, nil
}

// Save - saves the events to the store which will be replayed when the Kafka connection is active.
func (target *KafkaTarget) Save(eventData event.Event) error {
	if target.store != nil {
		return target.store.Put(eventData)
	}
	_, err := target.IsActive()
	if err != nil {
		return err
	}
	return target.send(eventData)
}

// send - sends an event to Kafka.
func (target *KafkaTarget) send(eventData event.Event) error {
	objectName, err := url.QueryUnescape(eventData.S3.Object.Key)
	if err != nil {
		return err
	}
	key := eventData.S3.Bucket.Name + "/" + objectName

	data, err := json.Marshal(event.Log{EventName: eventData.EventName, Key: key, Records: []event.Event{eventData}})
	if err != nil {
		return err
	}

	msg := sarama.ProducerMessage{
		Topic: target.args.Topic,
		Key:   sarama.StringEncoder(key),
		Value: sarama.ByteEncoder(data),
	}

	_, _, err = target.producer.SendMessage(&msg)
	return err
}
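
// Illustrative note: the message above uses the "bucket/object" pair as its
// key and the JSON-encoded event.Log as its value, so a consumer would see a
// payload shaped roughly like this (hypothetical values, assuming event.Log
// marshals under its exported field names):
//
//	key:   "mybucket/myobject"
//	value: {"EventName":"s3:ObjectCreated:Put","Key":"mybucket/myobject","Records":[...]}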

// Send - reads an event from store and sends it to Kafka.
func (target *KafkaTarget) Send(eventKey string) error {
	var err error
	_, err = target.IsActive()
	if err != nil {
		return err
	}

	if target.producer == nil {
		brokers := []string{}
		for _, broker := range target.args.Brokers {
			brokers = append(brokers, broker.String())
		}
		target.producer, err = sarama.NewSyncProducer(brokers, target.config)
		if err != nil {
			if err != sarama.ErrOutOfBrokers {
				return err
			}
			return errNotConnected
		}
	}

	eventData, eErr := target.store.Get(eventKey)
	if eErr != nil {
		// The last event key in a successful batch will be sent in the channel at most once by replayEvents().
		// Such events will no longer exist and would have already been sent successfully.
		if os.IsNotExist(eErr) {
			return nil
		}
		return eErr
	}

	err = target.send(eventData)
	if err != nil {
		// Sarama opens the circuit breaker after 3 consecutive connection failures.
		if err == sarama.ErrLeaderNotAvailable || err.Error() == "circuit breaker is open" {
			return errNotConnected
		}
		return err
	}

	// Delete the event from store.
	return target.store.Del(eventKey)
}

// Close - closes the underlying Kafka connection.
func (target *KafkaTarget) Close() error {
	if target.producer != nil {
		return target.producer.Close()
	}
	return nil
}

// Check if at least one broker in the cluster is reachable.
func (k KafkaArgs) pingBrokers() bool {
	for _, broker := range k.Brokers {
		conn, dErr := net.Dial("tcp", broker.String())
		if dErr == nil {
			// Close the probe connection; we only needed to confirm reachability.
			conn.Close()
			return true
		}
	}
	return false
}

// NewKafkaTarget - creates new Kafka target with auth credentials.
func NewKafkaTarget(id string, args KafkaArgs, doneCh <-chan struct{}, loggerOnce func(ctx context.Context, err error, id interface{}, kind ...interface{})) (*KafkaTarget, error) {
	config := sarama.NewConfig()

	config.Net.SASL.User = args.SASL.User
	config.Net.SASL.Password = args.SASL.Password
	config.Net.SASL.Enable = args.SASL.Enable

	tlsConfig, err := saramatls.NewConfig(args.TLS.ClientTLSCert, args.TLS.ClientTLSKey)
	if err != nil {
		return nil, err
	}

	config.Net.TLS.Enable = args.TLS.Enable
	config.Net.TLS.Config = tlsConfig
	config.Net.TLS.Config.InsecureSkipVerify = args.TLS.SkipVerify
	config.Net.TLS.Config.ClientAuth = args.TLS.ClientAuth
	config.Net.TLS.Config.RootCAs = args.TLS.RootCAs

	config.Producer.RequiredAcks = sarama.WaitForAll
	config.Producer.Retry.Max = 10
	config.Producer.Return.Successes = true

	brokers := []string{}
	for _, broker := range args.Brokers {
		brokers = append(brokers, broker.String())
	}

	var store Store

	if args.QueueDir != "" {
		queueDir := filepath.Join(args.QueueDir, storePrefix+"-kafka-"+id)
		store = NewQueueStore(queueDir, args.QueueLimit)
		if oErr := store.Open(); oErr != nil {
			return nil, oErr
		}
	}

	producer, err := sarama.NewSyncProducer(brokers, config)
	if err != nil {
		if store == nil || err != sarama.ErrOutOfBrokers {
			return nil, err
		}
	}

	target := &KafkaTarget{
		id:       event.TargetID{ID: id, Name: "kafka"},
		args:     args,
		producer: producer,
		config:   config,
		store:    store,
	}

	if target.store != nil {
		// Replays the events from the store.
		eventKeyCh := replayEvents(target.store, doneCh, loggerOnce, target.ID())
		// Start sending the replayed events to Kafka.
		go sendEvents(target, eventKeyCh, doneCh, loggerOnce)
	}

	return target, nil
}
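
// Minimal usage sketch (illustrative only; the broker address, done channel,
// and logger are hypothetical placeholders, and error handling is elided):
//
//	host, _ := xnet.ParseHost("localhost:9092")
//	args := KafkaArgs{Enable: true, Brokers: []xnet.Host{*host}, Topic: "bucketevents"}
//	target, _ := NewKafkaTarget("1", args, doneCh, loggerOnce)
//	_ = target.Save(someEvent)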