Add sasl configurations for kafka (#17323)
Signed-off-by: yun.zhang <yun.zhang@zilliz.com>
parent e88ffb8a57
commit 5698bf4236
@@ -75,6 +75,8 @@ pulsar:
 # If you want to enable kafka, needs to comment the pulsar configs
 #kafka:
 #  brokerList: localhost1:9092,localhost2:9092,localhost3:9092
+#  saslUsername: username
+#  saslPassword: password

 rocksmq:
   # please adjust in embedded Milvus: /tmp/milvus/rdb_data
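For reference, an uncommented kafka section with SASL enabled would look roughly like the following in milvus.yaml; the broker list and credentials are placeholders, and (as the comment in the file says) the pulsar section has to be commented out in its place:

    kafka:
      brokerList: localhost1:9092,localhost2:9092,localhost3:9092
      saslUsername: username
      saslPassword: password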
@@ -124,17 +124,17 @@ func NewRmsFactory(path string) *RmsFactory {

 type KmsFactory struct {
     dispatcherFactory ProtoUDFactory
-    KafkaAddress      string
+    config            *paramtable.KafkaConfig
     ReceiveBufSize    int64
 }

 func (f *KmsFactory) NewMsgStream(ctx context.Context) (MsgStream, error) {
-    kafkaClient := kafkawrapper.NewKafkaClientInstance(f.KafkaAddress)
+    kafkaClient := kafkawrapper.NewKafkaClientInstanceWithConfig(f.config)
     return NewMqMsgStream(ctx, f.ReceiveBufSize, -1, kafkaClient, f.dispatcherFactory.NewUnmarshalDispatcher())
 }

 func (f *KmsFactory) NewTtMsgStream(ctx context.Context) (MsgStream, error) {
-    kafkaClient := kafkawrapper.NewKafkaClientInstance(f.KafkaAddress)
+    kafkaClient := kafkawrapper.NewKafkaClientInstanceWithConfig(f.config)
     return NewMqTtMsgStream(ctx, f.ReceiveBufSize, -1, kafkaClient, f.dispatcherFactory.NewUnmarshalDispatcher())
 }

@@ -146,7 +146,7 @@ func NewKmsFactory(config *paramtable.KafkaConfig) Factory {
     f := &KmsFactory{
         dispatcherFactory: ProtoUDFactory{},
         ReceiveBufSize:    1024,
-        KafkaAddress:      config.Address,
+        config:            config,
     }
     return f
 }
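As a rough usage sketch (not part of this commit), a caller now hands the whole Kafka configuration to the factory instead of a bare broker address, so the SASL settings reach every stream the factory creates; all field values below are placeholders:

    // Illustrative only: in Milvus these values come from paramtable, not a literal.
    cfg := &paramtable.KafkaConfig{
        Address:      "localhost:9092",
        SaslUsername: "username",
        SaslPassword: "password",
    }
    factory := NewKmsFactory(cfg)
    stream, err := factory.NewMsgStream(context.Background())
    _, _ = stream, err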
@@ -4,6 +4,8 @@ import (
     "strconv"
     "sync"

+    "github.com/milvus-io/milvus/internal/util/paramtable"
+
     "github.com/confluentinc/confluent-kafka-go/kafka"
     "github.com/milvus-io/milvus/internal/log"
     "github.com/milvus-io/milvus/internal/mq/msgstream/mqwrapper"
@@ -18,18 +20,38 @@ type kafkaClient struct {
     basicConfig kafka.ConfigMap
 }

-func NewKafkaClientInstance(address string) *kafkaClient {
-    config := kafka.ConfigMap{
+func getBasicConfig(address string) kafka.ConfigMap {
+    return kafka.ConfigMap{
         "bootstrap.servers": address,
         "socket.timeout.ms": 300000,
         "socket.max.fails":  3,
-        //"receive.message.max.bytes": 10485760,
         "api.version.request": true,
     }
+}
+
+func NewKafkaClientInstance(address string) *kafkaClient {
+    config := getBasicConfig(address)
     return &kafkaClient{basicConfig: config}
 }

+func NewKafkaClientInstanceWithConfig(config *paramtable.KafkaConfig) *kafkaClient {
+    kafkaConfig := getBasicConfig(config.Address)
+
+    if (config.SaslUsername == "" && config.SaslPassword != "") ||
+        (config.SaslUsername != "" && config.SaslPassword == "") {
+        panic("enable security mode need config username and password at the same time!")
+    }
+
+    if config.SaslUsername != "" && config.SaslPassword != "" {
+        kafkaConfig.SetKey("sasl.mechanisms", "PLAIN")
+        kafkaConfig.SetKey("security.protocol", "SASL_SSL")
+        kafkaConfig.SetKey("sasl.username", config.SaslUsername)
+        kafkaConfig.SetKey("sasl.password", config.SaslPassword)
+    }
+
+    return &kafkaClient{basicConfig: kafkaConfig}
+}
+
 func cloneKafkaConfig(config kafka.ConfigMap) *kafka.ConfigMap {
     newConfig := make(kafka.ConfigMap)
     for k, v := range config {
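Put together, when both credentials are set the client ends up with roughly this librdkafka configuration (a sketch; broker address and credentials are placeholders):

    cfg := kafka.ConfigMap{
        "bootstrap.servers":   "localhost:9092",
        "socket.timeout.ms":   300000,
        "socket.max.fails":    3,
        "api.version.request": true,
        // set only when SaslUsername and SaslPassword are both non-empty:
        "sasl.mechanisms":   "PLAIN",
        "security.protocol": "SASL_SSL",
        "sasl.username":     "username",
        "sasl.password":     "password",
    }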
@@ -278,6 +278,19 @@ func TestKafkaClient_MsgSerializAndDeserialize(t *testing.T) {
     assert.Nil(t, msgID)
 }

+func TestKafkaClient_NewKafkaClientInstanceWithConfig(t *testing.T) {
+    config1 := &paramtable.KafkaConfig{Address: "addr", SaslPassword: "password"}
+    assert.Panics(t, func() { NewKafkaClientInstanceWithConfig(config1) })
+
+    config2 := &paramtable.KafkaConfig{Address: "addr", SaslUsername: "username"}
+    assert.Panics(t, func() { NewKafkaClientInstanceWithConfig(config2) })
+
+    config3 := &paramtable.KafkaConfig{Address: "addr", SaslUsername: "username", SaslPassword: "password"}
+    client := NewKafkaClientInstanceWithConfig(config3)
+    assert.NotNil(t, client)
+    assert.NotNil(t, client.basicConfig)
+}
+
 func createKafkaClient(t *testing.T) *kafkaClient {
     kafkaAddress, _ := Params.Load("_KafkaBrokerList")
     kc := NewKafkaClientInstance(kafkaAddress)
@@ -236,11 +236,15 @@ func (p *PulsarConfig) initMaxMessageSize() {
 type KafkaConfig struct {
     Base    *BaseTable
     Address string
+    SaslUsername string
+    SaslPassword string
 }

 func (k *KafkaConfig) init(base *BaseTable) {
     k.Base = base
     k.initAddress()
+    k.initSaslUsername()
+    k.initSaslPassword()
 }

 func (k *KafkaConfig) initAddress() {
@@ -251,6 +255,14 @@ func (k *KafkaConfig) initAddress() {
     k.Address = addr
 }

+func (k *KafkaConfig) initSaslUsername() {
+    k.SaslUsername = k.Base.LoadWithDefault("kafka.saslUsername", "")
+}
+
+func (k *KafkaConfig) initSaslPassword() {
+    k.SaslPassword = k.Base.LoadWithDefault("kafka.saslPassword", "")
+}
+
 ///////////////////////////////////////////////////////////////////////////////
 // --- rocksmq ---
 type RocksmqConfig struct {
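Reading the new keys back is then an ordinary paramtable lookup; a minimal sketch inside the paramtable package, assuming base is a BaseTable already loaded from milvus.yaml:

    var k KafkaConfig
    k.init(base) // runs initAddress, initSaslUsername, initSaslPassword
    // Both fields default to "" when kafka.saslUsername / kafka.saslPassword are
    // absent, which leaves SASL disabled in NewKafkaClientInstanceWithConfig.
    user, pass := k.SaslUsername, k.SaslPassword
    _, _ = user, pass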