2023-04-17 18:45:01 -04:00
|
|
|
// Copyright (c) 2015-2023 MinIO, Inc.
|
2021-04-18 15:41:13 -04:00
|
|
|
//
|
|
|
|
// This file is part of MinIO Object Storage stack
|
|
|
|
//
|
|
|
|
// This program is free software: you can redistribute it and/or modify
|
|
|
|
// it under the terms of the GNU Affero General Public License as published by
|
|
|
|
// the Free Software Foundation, either version 3 of the License, or
|
|
|
|
// (at your option) any later version.
|
|
|
|
//
|
|
|
|
// This program is distributed in the hope that it will be useful
|
|
|
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
// GNU Affero General Public License for more details.
|
|
|
|
//
|
|
|
|
// You should have received a copy of the GNU Affero General Public License
|
|
|
|
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
2018-03-15 16:03:41 -04:00
|
|
|
|
|
|
|
package target
|
|
|
|
|
|
|
|
import (
|
2019-10-11 20:46:03 -04:00
|
|
|
"context"
|
2018-09-07 03:01:58 -04:00
|
|
|
"crypto/tls"
|
2019-09-18 14:13:04 -04:00
|
|
|
"crypto/x509"
|
2018-03-15 16:03:41 -04:00
|
|
|
"encoding/json"
|
2018-07-18 14:22:29 -04:00
|
|
|
"errors"
|
2022-09-27 20:23:28 -04:00
|
|
|
"fmt"
|
2019-05-29 16:19:48 -04:00
|
|
|
"net"
|
2018-03-15 16:03:41 -04:00
|
|
|
"net/url"
|
2019-05-29 16:19:48 -04:00
|
|
|
"os"
|
|
|
|
"path/filepath"
|
2023-11-30 03:25:03 -05:00
|
|
|
"strings"
|
2023-06-07 14:47:00 -04:00
|
|
|
"sync"
|
2023-04-17 18:45:01 -04:00
|
|
|
"time"
|
2018-03-15 16:03:41 -04:00
|
|
|
|
2021-06-01 17:59:40 -04:00
|
|
|
"github.com/minio/minio/internal/event"
|
2022-07-27 12:44:59 -04:00
|
|
|
"github.com/minio/minio/internal/logger"
|
2023-05-09 00:20:31 -04:00
|
|
|
"github.com/minio/minio/internal/once"
|
2023-04-04 13:52:24 -04:00
|
|
|
"github.com/minio/minio/internal/store"
|
2023-09-04 15:57:37 -04:00
|
|
|
xnet "github.com/minio/pkg/v2/net"
|
2018-03-15 16:03:41 -04:00
|
|
|
|
2023-08-22 23:26:35 -04:00
|
|
|
"github.com/IBM/sarama"
|
|
|
|
saramatls "github.com/IBM/sarama/tools/tls"
|
2018-03-15 16:03:41 -04:00
|
|
|
)
|
|
|
|
|
2020-02-16 21:26:34 -05:00
|
|
|
// Kafka input constants
const (
	// Configuration keys accepted in the notify_kafka config subsystem.
	KafkaBrokers          = "brokers"
	KafkaTopic            = "topic"
	KafkaQueueDir         = "queue_dir"
	KafkaQueueLimit       = "queue_limit"
	KafkaTLS              = "tls"
	KafkaTLSSkipVerify    = "tls_skip_verify"
	KafkaTLSClientAuth    = "tls_client_auth"
	KafkaSASL             = "sasl"
	KafkaSASLUsername     = "sasl_username"
	KafkaSASLPassword     = "sasl_password"
	KafkaSASLMechanism    = "sasl_mechanism"
	KafkaClientTLSCert    = "client_tls_cert"
	KafkaClientTLSKey     = "client_tls_key"
	KafkaVersion          = "version"
	KafkaBatchSize        = "batch_size"
	KafkaCompressionCodec = "compression_codec"
	KafkaCompressionLevel = "compression_level"

	// Environment variables overriding the corresponding config keys above.
	EnvKafkaEnable                   = "MINIO_NOTIFY_KAFKA_ENABLE"
	EnvKafkaBrokers                  = "MINIO_NOTIFY_KAFKA_BROKERS"
	EnvKafkaTopic                    = "MINIO_NOTIFY_KAFKA_TOPIC"
	EnvKafkaQueueDir                 = "MINIO_NOTIFY_KAFKA_QUEUE_DIR"
	EnvKafkaQueueLimit               = "MINIO_NOTIFY_KAFKA_QUEUE_LIMIT"
	EnvKafkaTLS                      = "MINIO_NOTIFY_KAFKA_TLS"
	EnvKafkaTLSSkipVerify            = "MINIO_NOTIFY_KAFKA_TLS_SKIP_VERIFY"
	EnvKafkaTLSClientAuth            = "MINIO_NOTIFY_KAFKA_TLS_CLIENT_AUTH"
	EnvKafkaSASLEnable               = "MINIO_NOTIFY_KAFKA_SASL"
	EnvKafkaSASLUsername             = "MINIO_NOTIFY_KAFKA_SASL_USERNAME"
	EnvKafkaSASLPassword             = "MINIO_NOTIFY_KAFKA_SASL_PASSWORD"
	EnvKafkaSASLMechanism            = "MINIO_NOTIFY_KAFKA_SASL_MECHANISM"
	EnvKafkaClientTLSCert            = "MINIO_NOTIFY_KAFKA_CLIENT_TLS_CERT"
	EnvKafkaClientTLSKey             = "MINIO_NOTIFY_KAFKA_CLIENT_TLS_KEY"
	EnvKafkaVersion                  = "MINIO_NOTIFY_KAFKA_VERSION"
	EnvKafkaBatchSize                = "MINIO_NOTIFY_KAFKA_BATCH_SIZE"
	EnvKafkaProducerCompressionCodec = "MINIO_NOTIFY_KAFKA_PRODUCER_COMPRESSION_CODEC"
	EnvKafkaProducerCompressionLevel = "MINIO_NOTIFY_KAFKA_PRODUCER_COMPRESSION_LEVEL"
)
|
|
|
|
|
2023-11-30 03:25:03 -05:00
|
|
|
// codecs maps the user-facing compression codec names (matched
// case-insensitively in initKafka) to their sarama equivalents.
var codecs = map[string]sarama.CompressionCodec{
	"none":   sarama.CompressionNone,
	"gzip":   sarama.CompressionGZIP,
	"snappy": sarama.CompressionSnappy,
	"lz4":    sarama.CompressionLZ4,
	"zstd":   sarama.CompressionZSTD,
}
|
|
|
|
|
2018-03-15 16:03:41 -04:00
|
|
|
// KafkaArgs - Kafka target arguments.
type KafkaArgs struct {
	Enable     bool        `json:"enable"`     // whether the target is enabled
	Brokers    []xnet.Host `json:"brokers"`    // broker addresses to connect to
	Topic      string      `json:"topic"`      // topic that events are published to
	QueueDir   string      `json:"queueDir"`   // absolute dir for the persistent event queue (optional)
	QueueLimit uint64      `json:"queueLimit"` // max entries kept in the queue store
	Version    string      `json:"version"`    // Kafka protocol version string, parsed by sarama
	BatchSize  uint32      `json:"batchSize"`  // >1 enables in-memory batching (requires QueueDir)
	TLS        struct {
		Enable        bool               `json:"enable"`
		RootCAs       *x509.CertPool     `json:"-"` // not serialized; provided at construction
		SkipVerify    bool               `json:"skipVerify"`
		ClientAuth    tls.ClientAuthType `json:"clientAuth"`
		ClientTLSCert string             `json:"clientTLSCert"`
		ClientTLSKey  string             `json:"clientTLSKey"`
	} `json:"tls"`
	SASL struct {
		Enable    bool   `json:"enable"`
		User      string `json:"username"`
		Password  string `json:"password"`
		Mechanism string `json:"mechanism"`
	} `json:"sasl"`
	Producer struct {
		Compression      string `json:"compression"`      // codec name, see codecs map
		CompressionLevel int    `json:"compressionLevel"` // codec-specific level
	} `json:"producer"`
}
|
|
|
|
|
2018-07-18 14:22:29 -04:00
|
|
|
// Validate KafkaArgs fields
|
|
|
|
func (k KafkaArgs) Validate() error {
|
|
|
|
if !k.Enable {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
if len(k.Brokers) == 0 {
|
|
|
|
return errors.New("no broker address found")
|
|
|
|
}
|
|
|
|
for _, b := range k.Brokers {
|
|
|
|
if _, err := xnet.ParseHost(b.String()); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
2019-05-29 16:19:48 -04:00
|
|
|
if k.QueueDir != "" {
|
|
|
|
if !filepath.IsAbs(k.QueueDir) {
|
|
|
|
return errors.New("queueDir path should be absolute")
|
|
|
|
}
|
|
|
|
}
|
2020-02-16 21:26:34 -05:00
|
|
|
if k.Version != "" {
|
|
|
|
if _, err := sarama.ParseKafkaVersion(k.Version); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
2023-10-07 11:07:38 -04:00
|
|
|
if k.BatchSize > 1 {
|
|
|
|
if k.QueueDir == "" {
|
|
|
|
return errors.New("batch should be enabled only if queue dir is enabled")
|
|
|
|
}
|
|
|
|
}
|
2018-07-18 14:22:29 -04:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2018-03-15 16:03:41 -04:00
|
|
|
// KafkaTarget - Kafka target.
type KafkaTarget struct {
	initOnce once.Init // guards one-time lazy initialization (initKafka)

	id          event.TargetID                                // unique target identity ({id, "kafka"})
	args        KafkaArgs                                     // validated configuration
	producer    sarama.SyncProducer                           // created lazily by initKafka
	config      *sarama.Config                                // sarama config built from args
	store       store.Store[event.Event]                      // optional persistent queue (nil if QueueDir unset)
	batch       *store.Batch[string, *sarama.ProducerMessage] // optional in-memory batch (nil if BatchSize <= 1)
	loggerOnce  logger.LogOnce                                // de-duplicating logger
	brokerConns map[string]net.Conn                           // cached TCP connections used by pingBrokers
	quitCh      chan struct{}                                 // closed on Close() to stop store streaming
}
|
|
|
|
|
|
|
|
// ID - returns target ID.
func (target *KafkaTarget) ID() event.TargetID {
	return target.id
}
|
|
|
|
|
2023-04-04 13:52:24 -04:00
|
|
|
// Name - returns the Name of the target.
|
|
|
|
func (target *KafkaTarget) Name() string {
|
|
|
|
return target.ID().String()
|
|
|
|
}
|
|
|
|
|
2022-11-08 19:36:47 -05:00
|
|
|
// Store returns any underlying store if set.
// The result is nil when no queue directory was configured.
func (target *KafkaTarget) Store() event.TargetStore {
	return target.store
}
|
|
|
|
|
2019-12-11 17:27:03 -05:00
|
|
|
// IsActive - Return true if target is up and active.
// It first ensures the producer is initialized (one-time lazy init),
// then pings the brokers.
func (target *KafkaTarget) IsActive() (bool, error) {
	if err := target.init(); err != nil {
		return false, err
	}
	return target.isActive()
}
|
|
|
|
|
|
|
|
// isActive reports broker reachability without performing initialization;
// any ping failure is mapped to store.ErrNotConnected.
func (target *KafkaTarget) isActive() (bool, error) {
	if err := target.pingBrokers(); err != nil {
		return false, store.ErrNotConnected
	}
	return true, nil
}
|
|
|
|
|
2019-05-29 16:19:48 -04:00
|
|
|
// Save - saves the events to the store which will be replayed when the Kafka connection is active.
// When no store is configured, the event is sent to Kafka synchronously
// after a one-time lazy initialization.
func (target *KafkaTarget) Save(eventData event.Event) error {
	if target.store != nil {
		// Persist only; replay to Kafka happens via SendFromStore.
		return target.store.Put(eventData)
	}
	if err := target.init(); err != nil {
		return err
	}
	return target.send(eventData)
}
|
|
|
|
|
2019-05-29 16:19:48 -04:00
|
|
|
// send - sends an event to the kafka.
|
2019-04-10 08:46:01 -04:00
|
|
|
func (target *KafkaTarget) send(eventData event.Event) error {
|
2021-02-18 14:14:27 -05:00
|
|
|
if target.producer == nil {
|
2023-04-04 13:52:24 -04:00
|
|
|
return store.ErrNotConnected
|
2021-02-18 14:14:27 -05:00
|
|
|
}
|
2023-10-07 11:07:38 -04:00
|
|
|
msg, err := target.toProducerMessage(eventData)
|
2018-03-15 16:03:41 -04:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2023-10-07 11:07:38 -04:00
|
|
|
_, _, err = target.producer.SendMessage(msg)
|
2018-03-15 16:03:41 -04:00
|
|
|
return err
|
|
|
|
}
|
2019-04-10 08:46:01 -04:00
|
|
|
|
2023-05-09 00:20:31 -04:00
|
|
|
// SendFromStore - reads an event from store and sends it to Kafka.
|
2023-10-07 11:07:38 -04:00
|
|
|
func (target *KafkaTarget) SendFromStore(key store.Key) error {
|
2022-09-27 20:23:28 -04:00
|
|
|
if err := target.init(); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2023-10-07 11:07:38 -04:00
|
|
|
// If batch is enabled, the event will be batched in memory
|
|
|
|
// and will be committed once the batch is full.
|
|
|
|
if target.batch != nil {
|
|
|
|
return target.addToBatch(key)
|
|
|
|
}
|
|
|
|
|
|
|
|
eventData, eErr := target.store.Get(key.Name)
|
2019-05-29 16:19:48 -04:00
|
|
|
if eErr != nil {
|
|
|
|
// The last event key in a successful batch will be sent in the channel atmost once by the replayEvents()
|
|
|
|
// Such events will not exist and wouldve been already been sent successfully.
|
|
|
|
if os.IsNotExist(eErr) {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
return eErr
|
|
|
|
}
|
|
|
|
|
2024-01-24 16:10:52 -05:00
|
|
|
if err := target.send(eventData); err != nil {
|
2023-10-07 11:07:38 -04:00
|
|
|
if isKafkaConnErr(err) {
|
2023-04-04 13:52:24 -04:00
|
|
|
return store.ErrNotConnected
|
2019-05-29 16:19:48 -04:00
|
|
|
}
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
// Delete the event from store.
|
2023-10-07 11:07:38 -04:00
|
|
|
return target.store.Del(key.Name)
|
|
|
|
}
|
|
|
|
|
|
|
|
func (target *KafkaTarget) addToBatch(key store.Key) error {
|
|
|
|
if target.batch.IsFull() {
|
|
|
|
if err := target.commitBatch(); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if _, ok := target.batch.GetByKey(key.Name); !ok {
|
|
|
|
eventData, err := target.store.Get(key.Name)
|
|
|
|
if err != nil {
|
|
|
|
if os.IsNotExist(err) {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
msg, err := target.toProducerMessage(eventData)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
if err = target.batch.Add(key.Name, msg); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// commit the batch if the key is the last one present in the store.
|
|
|
|
if key.IsLast || target.batch.IsFull() {
|
|
|
|
return target.commitBatch()
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (target *KafkaTarget) commitBatch() error {
|
|
|
|
keys, msgs, err := target.batch.GetAll()
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
if err = target.producer.SendMessages(msgs); err != nil {
|
|
|
|
if isKafkaConnErr(err) {
|
|
|
|
return store.ErrNotConnected
|
|
|
|
}
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
return target.store.DelList(keys)
|
|
|
|
}
|
|
|
|
|
|
|
|
func (target *KafkaTarget) toProducerMessage(eventData event.Event) (*sarama.ProducerMessage, error) {
|
|
|
|
objectName, err := url.QueryUnescape(eventData.S3.Object.Key)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
key := eventData.S3.Bucket.Name + "/" + objectName
|
|
|
|
data, err := json.Marshal(event.Log{EventName: eventData.EventName, Key: key, Records: []event.Event{eventData}})
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
return &sarama.ProducerMessage{
|
|
|
|
Topic: target.args.Topic,
|
|
|
|
Key: sarama.StringEncoder(key),
|
|
|
|
Value: sarama.ByteEncoder(data),
|
|
|
|
}, nil
|
2019-04-10 08:46:01 -04:00
|
|
|
}
|
2018-03-15 16:03:41 -04:00
|
|
|
|
|
|
|
// Close - closes underneath kafka connection.
|
|
|
|
func (target *KafkaTarget) Close() error {
|
2022-09-27 20:23:28 -04:00
|
|
|
close(target.quitCh)
|
2019-05-29 16:19:48 -04:00
|
|
|
if target.producer != nil {
|
|
|
|
return target.producer.Close()
|
|
|
|
}
|
2024-01-24 16:10:52 -05:00
|
|
|
for _, conn := range target.brokerConns {
|
|
|
|
if conn != nil {
|
|
|
|
conn.Close()
|
|
|
|
}
|
|
|
|
}
|
2019-05-29 16:19:48 -04:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2024-01-18 02:03:17 -05:00
|
|
|
// Check if at least one broker in cluster is active
|
2024-01-24 16:10:52 -05:00
|
|
|
func (target *KafkaTarget) pingBrokers() (err error) {
|
2023-06-07 14:47:00 -04:00
|
|
|
d := net.Dialer{Timeout: 1 * time.Second}
|
|
|
|
|
2024-01-24 16:10:52 -05:00
|
|
|
errs := make([]error, len(target.args.Brokers))
|
2023-06-07 14:47:00 -04:00
|
|
|
var wg sync.WaitGroup
|
2024-01-24 16:10:52 -05:00
|
|
|
for idx, broker := range target.args.Brokers {
|
2023-06-07 14:47:00 -04:00
|
|
|
broker := broker
|
|
|
|
idx := idx
|
|
|
|
wg.Add(1)
|
|
|
|
go func(broker xnet.Host, idx int) {
|
|
|
|
defer wg.Done()
|
2024-01-24 16:10:52 -05:00
|
|
|
conn, ok := target.brokerConns[broker.String()]
|
|
|
|
if !ok || conn == nil {
|
|
|
|
conn, errs[idx] = d.Dial("tcp", broker.String())
|
|
|
|
if errs[idx] != nil {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
target.brokerConns[broker.String()] = conn
|
|
|
|
}
|
|
|
|
if _, errs[idx] = conn.Write([]byte("")); errs[idx] != nil {
|
|
|
|
conn.Close()
|
|
|
|
target.brokerConns[broker.String()] = nil
|
|
|
|
}
|
2023-06-07 14:47:00 -04:00
|
|
|
}(broker, idx)
|
|
|
|
}
|
|
|
|
wg.Wait()
|
|
|
|
|
|
|
|
var retErr error
|
|
|
|
for _, err := range errs {
|
|
|
|
if err == nil {
|
|
|
|
// if one of them is active we are good.
|
|
|
|
return nil
|
2019-05-29 16:19:48 -04:00
|
|
|
}
|
2023-06-07 14:47:00 -04:00
|
|
|
retErr = err
|
2019-05-29 16:19:48 -04:00
|
|
|
}
|
2023-06-07 14:47:00 -04:00
|
|
|
return retErr
|
2018-03-15 16:03:41 -04:00
|
|
|
}
|
|
|
|
|
2022-09-27 20:23:28 -04:00
|
|
|
// init performs the one-time lazy initialization of the Kafka client by
// running initKafka through initOnce and returning its result.
func (target *KafkaTarget) init() error {
	return target.initOnce.Do(target.initKafka)
}
|
2018-09-07 03:01:58 -04:00
|
|
|
|
2022-09-27 20:23:28 -04:00
|
|
|
func (target *KafkaTarget) initKafka() error {
|
|
|
|
args := target.args
|
2020-04-14 14:19:25 -04:00
|
|
|
|
2022-09-27 20:23:28 -04:00
|
|
|
config := sarama.NewConfig()
|
2020-02-16 21:26:34 -05:00
|
|
|
if args.Version != "" {
|
|
|
|
kafkaVersion, err := sarama.ParseKafkaVersion(args.Version)
|
|
|
|
if err != nil {
|
2022-07-27 12:44:59 -04:00
|
|
|
target.loggerOnce(context.Background(), err, target.ID().String())
|
2022-09-27 20:23:28 -04:00
|
|
|
return err
|
2020-02-16 21:26:34 -05:00
|
|
|
}
|
|
|
|
config.Version = kafkaVersion
|
|
|
|
}
|
|
|
|
|
2023-06-07 14:47:00 -04:00
|
|
|
config.Net.KeepAlive = 60 * time.Second
|
2018-09-07 03:01:58 -04:00
|
|
|
config.Net.SASL.User = args.SASL.User
|
|
|
|
config.Net.SASL.Password = args.SASL.Password
|
2021-04-29 22:01:43 -04:00
|
|
|
initScramClient(args, config) // initializes configured scram client.
|
2018-09-07 03:01:58 -04:00
|
|
|
config.Net.SASL.Enable = args.SASL.Enable
|
|
|
|
|
2019-12-05 18:31:46 -05:00
|
|
|
tlsConfig, err := saramatls.NewConfig(args.TLS.ClientTLSCert, args.TLS.ClientTLSKey)
|
|
|
|
if err != nil {
|
2022-07-27 12:44:59 -04:00
|
|
|
target.loggerOnce(context.Background(), err, target.ID().String())
|
2022-09-27 20:23:28 -04:00
|
|
|
return err
|
2018-09-07 03:01:58 -04:00
|
|
|
}
|
2019-12-05 18:31:46 -05:00
|
|
|
|
|
|
|
config.Net.TLS.Enable = args.TLS.Enable
|
2018-09-07 03:01:58 -04:00
|
|
|
config.Net.TLS.Config = tlsConfig
|
2019-12-05 18:31:46 -05:00
|
|
|
config.Net.TLS.Config.InsecureSkipVerify = args.TLS.SkipVerify
|
|
|
|
config.Net.TLS.Config.ClientAuth = args.TLS.ClientAuth
|
|
|
|
config.Net.TLS.Config.RootCAs = args.TLS.RootCAs
|
2018-09-07 03:01:58 -04:00
|
|
|
|
2023-08-22 23:26:35 -04:00
|
|
|
// These settings are needed to ensure that kafka client doesn't hang on brokers
|
|
|
|
// refer https://github.com/IBM/sarama/issues/765#issuecomment-254333355
|
|
|
|
config.Producer.Retry.Max = 2
|
2023-10-07 11:07:38 -04:00
|
|
|
config.Producer.Retry.Backoff = (1 * time.Second)
|
2018-03-15 16:03:41 -04:00
|
|
|
config.Producer.Return.Successes = true
|
2023-08-22 23:26:35 -04:00
|
|
|
config.Producer.Return.Errors = true
|
|
|
|
config.Producer.RequiredAcks = 1
|
2023-10-07 11:07:38 -04:00
|
|
|
config.Producer.Timeout = (5 * time.Second)
|
2023-11-30 03:25:03 -05:00
|
|
|
// Set Producer Compression
|
|
|
|
cc, ok := codecs[strings.ToLower(args.Producer.Compression)]
|
|
|
|
if ok {
|
|
|
|
config.Producer.Compression = cc
|
|
|
|
config.Producer.CompressionLevel = args.Producer.CompressionLevel
|
|
|
|
}
|
|
|
|
|
2023-10-07 11:07:38 -04:00
|
|
|
config.Net.ReadTimeout = (5 * time.Second)
|
|
|
|
config.Net.DialTimeout = (5 * time.Second)
|
|
|
|
config.Net.WriteTimeout = (5 * time.Second)
|
2023-08-22 23:26:35 -04:00
|
|
|
config.Metadata.Retry.Max = 1
|
2023-10-07 11:07:38 -04:00
|
|
|
config.Metadata.Retry.Backoff = (1 * time.Second)
|
2023-08-22 23:26:35 -04:00
|
|
|
config.Metadata.RefreshFrequency = (15 * time.Minute)
|
2018-03-15 16:03:41 -04:00
|
|
|
|
2020-04-14 14:19:25 -04:00
|
|
|
target.config = config
|
|
|
|
|
2018-03-15 16:03:41 -04:00
|
|
|
brokers := []string{}
|
|
|
|
for _, broker := range args.Brokers {
|
|
|
|
brokers = append(brokers, broker.String())
|
|
|
|
}
|
2019-05-29 16:19:48 -04:00
|
|
|
|
2018-03-15 16:03:41 -04:00
|
|
|
producer, err := sarama.NewSyncProducer(brokers, config)
|
|
|
|
if err != nil {
|
2022-09-27 20:23:28 -04:00
|
|
|
if err != sarama.ErrOutOfBrokers {
|
2022-07-27 12:44:59 -04:00
|
|
|
target.loggerOnce(context.Background(), err, target.ID().String())
|
2019-05-29 16:19:48 -04:00
|
|
|
}
|
2022-09-27 20:23:28 -04:00
|
|
|
return err
|
2018-03-15 16:03:41 -04:00
|
|
|
}
|
2020-04-14 14:19:25 -04:00
|
|
|
target.producer = producer
|
2019-05-29 16:19:48 -04:00
|
|
|
|
2022-09-27 20:23:28 -04:00
|
|
|
yes, err := target.isActive()
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
if !yes {
|
2023-04-04 13:52:24 -04:00
|
|
|
return store.ErrNotConnected
|
2019-05-29 16:19:48 -04:00
|
|
|
}
|
|
|
|
|
2022-09-27 20:23:28 -04:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// NewKafkaTarget - creates new Kafka target with auth credentials.
|
|
|
|
func NewKafkaTarget(id string, args KafkaArgs, loggerOnce logger.LogOnce) (*KafkaTarget, error) {
|
2023-04-04 13:52:24 -04:00
|
|
|
var queueStore store.Store[event.Event]
|
2022-09-27 20:23:28 -04:00
|
|
|
if args.QueueDir != "" {
|
|
|
|
queueDir := filepath.Join(args.QueueDir, storePrefix+"-kafka-"+id)
|
2023-04-04 13:52:24 -04:00
|
|
|
queueStore = store.NewQueueStore[event.Event](queueDir, args.QueueLimit, event.StoreExtension)
|
|
|
|
if err := queueStore.Open(); err != nil {
|
2022-09-27 20:23:28 -04:00
|
|
|
return nil, fmt.Errorf("unable to initialize the queue store of Kafka `%s`: %w", id, err)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-11-07 11:01:24 -05:00
|
|
|
target := &KafkaTarget{
|
2024-01-24 16:10:52 -05:00
|
|
|
id: event.TargetID{ID: id, Name: "kafka"},
|
|
|
|
args: args,
|
|
|
|
store: queueStore,
|
|
|
|
loggerOnce: loggerOnce,
|
|
|
|
quitCh: make(chan struct{}),
|
|
|
|
brokerConns: make(map[string]net.Conn, len(args.Brokers)),
|
2022-11-07 11:01:24 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
if target.store != nil {
|
2023-10-07 11:07:38 -04:00
|
|
|
if args.BatchSize > 1 {
|
|
|
|
target.batch = store.NewBatch[string, *sarama.ProducerMessage](args.BatchSize)
|
|
|
|
}
|
2023-04-04 13:52:24 -04:00
|
|
|
store.StreamItems(target.store, target, target.quitCh, target.loggerOnce)
|
2022-11-07 11:01:24 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
return target, nil
|
2018-03-15 16:03:41 -04:00
|
|
|
}
|
2023-10-07 11:07:38 -04:00
|
|
|
|
|
|
|
func isKafkaConnErr(err error) bool {
|
2024-01-18 02:03:17 -05:00
|
|
|
// Sarama opens the circuit breaker after 3 consecutive connection failures.
|
2023-10-07 11:07:38 -04:00
|
|
|
return err == sarama.ErrLeaderNotAvailable || err.Error() == "circuit breaker is open"
|
|
|
|
}
|