enhance: transfer resource group and dbname to querynode when load (#30936)

issue: #30931

Signed-off-by: chyezh <chyezh@outlook.com>
This commit is contained in:
chyezh 2024-03-21 11:59:12 +08:00 committed by GitHub
parent 982dd2834b
commit 9f9ef8ac32
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
31 changed files with 333 additions and 158 deletions

View File

@ -296,6 +296,8 @@ message LoadMetaInfo {
int64 collectionID = 2; int64 collectionID = 2;
repeated int64 partitionIDs = 3; repeated int64 partitionIDs = 3;
string metric_type = 4 [deprecated = true]; string metric_type = 4 [deprecated = true];
string db_name = 5; // Only used for metrics label.
string resource_group = 6; // Only used for metrics label.
} }
message WatchDmChannelsRequest { message WatchDmChannelsRequest {

View File

@ -27,26 +27,26 @@ import (
type SegmentAssignPlan struct { type SegmentAssignPlan struct {
Segment *meta.Segment Segment *meta.Segment
ReplicaID int64 Replica *meta.Replica
From int64 // -1 if empty From int64 // -1 if empty
To int64 To int64
} }
func (segPlan *SegmentAssignPlan) ToString() string { func (segPlan *SegmentAssignPlan) ToString() string {
return fmt.Sprintf("SegmentPlan:[collectionID: %d, replicaID: %d, segmentID: %d, from: %d, to: %d]\n", return fmt.Sprintf("SegmentPlan:[collectionID: %d, replicaID: %d, segmentID: %d, from: %d, to: %d]\n",
segPlan.Segment.CollectionID, segPlan.ReplicaID, segPlan.Segment.ID, segPlan.From, segPlan.To) segPlan.Segment.CollectionID, segPlan.Replica.GetID(), segPlan.Segment.ID, segPlan.From, segPlan.To)
} }
type ChannelAssignPlan struct { type ChannelAssignPlan struct {
Channel *meta.DmChannel Channel *meta.DmChannel
ReplicaID int64 Replica *meta.Replica
From int64 From int64
To int64 To int64
} }
func (chanPlan *ChannelAssignPlan) ToString() string { func (chanPlan *ChannelAssignPlan) ToString() string {
return fmt.Sprintf("ChannelPlan:[collectionID: %d, channel: %s, replicaID: %d, from: %d, to: %d]\n", return fmt.Sprintf("ChannelPlan:[collectionID: %d, channel: %s, replicaID: %d, from: %d, to: %d]\n",
chanPlan.Channel.CollectionID, chanPlan.Channel.ChannelName, chanPlan.ReplicaID, chanPlan.From, chanPlan.To) chanPlan.Channel.CollectionID, chanPlan.Channel.ChannelName, chanPlan.Replica.GetID(), chanPlan.From, chanPlan.To)
} }
var ( var (

View File

@ -218,7 +218,7 @@ func (b *RowCountBasedBalancer) genStoppingSegmentPlan(replica *meta.Replica, on
plans := b.AssignSegment(replica.CollectionID, segments, onlineNodes) plans := b.AssignSegment(replica.CollectionID, segments, onlineNodes)
for i := range plans { for i := range plans {
plans[i].From = nodeID plans[i].From = nodeID
plans[i].ReplicaID = replica.ID plans[i].Replica = replica
} }
segmentPlans = append(segmentPlans, plans...) segmentPlans = append(segmentPlans, plans...)
} }
@ -286,7 +286,7 @@ func (b *RowCountBasedBalancer) genSegmentPlan(replica *meta.Replica, onlineNode
segmentPlans := b.AssignSegment(replica.CollectionID, segmentsToMove, nodesWithLessRow) segmentPlans := b.AssignSegment(replica.CollectionID, segmentsToMove, nodesWithLessRow)
for i := range segmentPlans { for i := range segmentPlans {
segmentPlans[i].From = segmentPlans[i].Segment.Node segmentPlans[i].From = segmentPlans[i].Segment.Node
segmentPlans[i].ReplicaID = replica.ID segmentPlans[i].Replica = replica
} }
return segmentPlans return segmentPlans
@ -299,7 +299,7 @@ func (b *RowCountBasedBalancer) genStoppingChannelPlan(replica *meta.Replica, on
plans := b.AssignChannel(dmChannels, onlineNodes) plans := b.AssignChannel(dmChannels, onlineNodes)
for i := range plans { for i := range plans {
plans[i].From = nodeID plans[i].From = nodeID
plans[i].ReplicaID = replica.ID plans[i].Replica = replica
} }
channelPlans = append(channelPlans, plans...) channelPlans = append(channelPlans, plans...)
} }
@ -337,7 +337,7 @@ func (b *RowCountBasedBalancer) genChannelPlan(replica *meta.Replica, onlineNode
channelPlans := b.AssignChannel(channelsToMove, nodeWithLessChannel) channelPlans := b.AssignChannel(channelsToMove, nodeWithLessChannel)
for i := range channelPlans { for i := range channelPlans {
channelPlans[i].From = channelPlans[i].Channel.Node channelPlans[i].From = channelPlans[i].Channel.Node
channelPlans[i].ReplicaID = replica.ID channelPlans[i].Replica = replica
} }
return channelPlans return channelPlans

View File

@ -36,6 +36,7 @@ import (
"github.com/milvus-io/milvus/internal/querycoordv2/utils" "github.com/milvus-io/milvus/internal/querycoordv2/utils"
"github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/etcd"
"github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/paramtable"
"github.com/milvus-io/milvus/pkg/util/typeutil"
) )
type RowCountBasedBalancerTestSuite struct { type RowCountBasedBalancerTestSuite struct {
@ -136,7 +137,7 @@ func (suite *RowCountBasedBalancerTestSuite) TestAssignSegment() {
suite.balancer.nodeManager.Add(nodeInfo) suite.balancer.nodeManager.Add(nodeInfo)
} }
plans := balancer.AssignSegment(0, c.assignments, c.nodes) plans := balancer.AssignSegment(0, c.assignments, c.nodes)
suite.ElementsMatch(c.expectPlans, plans) assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, plans)
}) })
} }
} }
@ -168,7 +169,7 @@ func (suite *RowCountBasedBalancerTestSuite) TestBalance() {
}, },
}, },
expectPlans: []SegmentAssignPlan{ expectPlans: []SegmentAssignPlan{
{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, Node: 2}, From: 2, To: 1, ReplicaID: 1}, {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, Node: 2}, From: 2, To: 1, Replica: newReplicaDefaultRG(1)},
}, },
expectChannelPlans: []ChannelAssignPlan{}, expectChannelPlans: []ChannelAssignPlan{},
}, },
@ -248,7 +249,7 @@ func (suite *RowCountBasedBalancerTestSuite) TestBalance() {
}, },
expectPlans: []SegmentAssignPlan{}, expectPlans: []SegmentAssignPlan{},
expectChannelPlans: []ChannelAssignPlan{ expectChannelPlans: []ChannelAssignPlan{
{Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 3}, From: 3, To: 1, ReplicaID: 1}, {Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
}, },
}, },
{ {
@ -277,8 +278,8 @@ func (suite *RowCountBasedBalancerTestSuite) TestBalance() {
}, },
}, },
expectPlans: []SegmentAssignPlan{ expectPlans: []SegmentAssignPlan{
{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 4, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, ReplicaID: 1}, {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 4, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 5, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, ReplicaID: 1}, {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 5, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
}, },
expectChannelPlans: []ChannelAssignPlan{}, expectChannelPlans: []ChannelAssignPlan{},
}, },
@ -298,7 +299,7 @@ func (suite *RowCountBasedBalancerTestSuite) TestBalance() {
}, },
expectPlans: []SegmentAssignPlan{}, expectPlans: []SegmentAssignPlan{},
expectChannelPlans: []ChannelAssignPlan{ expectChannelPlans: []ChannelAssignPlan{
{Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 2}, From: 2, To: 3, ReplicaID: 1}, {Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 2}, From: 2, To: 3, Replica: newReplicaDefaultRG(1)},
}, },
}, },
{ {
@ -340,8 +341,8 @@ func (suite *RowCountBasedBalancerTestSuite) TestBalance() {
}, },
expectPlans: []SegmentAssignPlan{}, expectPlans: []SegmentAssignPlan{},
expectChannelPlans: []ChannelAssignPlan{ expectChannelPlans: []ChannelAssignPlan{
{Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v2"}, Node: 1}, From: 1, To: 2, ReplicaID: 1}, {Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v2"}, Node: 1}, From: 1, To: 2, Replica: newReplicaDefaultRG(1)},
{Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v2"}, Node: 1}, From: 1, To: 3, ReplicaID: 1}, {Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v2"}, Node: 1}, From: 1, To: 3, Replica: newReplicaDefaultRG(1)},
}, },
multiple: true, multiple: true,
}, },
@ -426,11 +427,11 @@ func (suite *RowCountBasedBalancerTestSuite) TestBalance() {
segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, 1) segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, 1)
if !c.multiple { if !c.multiple {
suite.ElementsMatch(c.expectChannelPlans, channelPlans) assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, segmentPlans)
suite.ElementsMatch(c.expectPlans, segmentPlans) assertChannelAssignPlanElementMatch(&suite.Suite, c.expectChannelPlans, channelPlans)
} else { } else {
suite.Subset(c.expectPlans, segmentPlans) assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, segmentPlans, true)
suite.Subset(c.expectChannelPlans, channelPlans) assertChannelAssignPlanElementMatch(&suite.Suite, c.expectChannelPlans, channelPlans, true)
} }
// clear distribution // clear distribution
@ -527,8 +528,8 @@ func (suite *RowCountBasedBalancerTestSuite) TestBalanceOnPartStopping() {
}, },
}, },
expectPlans: []SegmentAssignPlan{ expectPlans: []SegmentAssignPlan{
{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 4, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, ReplicaID: 1}, {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 4, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 5, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, ReplicaID: 1}, {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 5, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
}, },
expectChannelPlans: []ChannelAssignPlan{}, expectChannelPlans: []ChannelAssignPlan{},
}, },
@ -591,7 +592,7 @@ func (suite *RowCountBasedBalancerTestSuite) TestBalanceOnPartStopping() {
}, },
expectPlans: []SegmentAssignPlan{}, expectPlans: []SegmentAssignPlan{},
expectChannelPlans: []ChannelAssignPlan{ expectChannelPlans: []ChannelAssignPlan{
{Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 3}, From: 3, To: 1, ReplicaID: 1}, {Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
}, },
}, },
} }
@ -635,8 +636,8 @@ func (suite *RowCountBasedBalancerTestSuite) TestBalanceOnPartStopping() {
suite.balancer.meta.ResourceManager.AssignNode(meta.DefaultResourceGroupName, c.nodes[i]) suite.balancer.meta.ResourceManager.AssignNode(meta.DefaultResourceGroupName, c.nodes[i])
} }
segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, 1) segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, 1)
suite.ElementsMatch(c.expectChannelPlans, channelPlans) assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, segmentPlans)
suite.ElementsMatch(c.expectPlans, segmentPlans) assertChannelAssignPlanElementMatch(&suite.Suite, c.expectChannelPlans, channelPlans)
}) })
} }
} }
@ -681,7 +682,7 @@ func (suite *RowCountBasedBalancerTestSuite) TestBalanceOutboundNodes() {
}, },
expectPlans: []SegmentAssignPlan{}, expectPlans: []SegmentAssignPlan{},
expectChannelPlans: []ChannelAssignPlan{ expectChannelPlans: []ChannelAssignPlan{
{Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 3}, From: 3, To: 1, ReplicaID: 1}, {Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
}, },
}, },
{ {
@ -710,8 +711,8 @@ func (suite *RowCountBasedBalancerTestSuite) TestBalanceOutboundNodes() {
}, },
}, },
expectPlans: []SegmentAssignPlan{ expectPlans: []SegmentAssignPlan{
{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 4, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, ReplicaID: 1}, {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 4, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 5, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, ReplicaID: 1}, {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 5, CollectionID: 1, NumOfRows: 10}, Node: 3}, From: 3, To: 1, Replica: newReplicaDefaultRG(1)},
}, },
expectChannelPlans: []ChannelAssignPlan{}, expectChannelPlans: []ChannelAssignPlan{},
}, },
@ -780,8 +781,8 @@ func (suite *RowCountBasedBalancerTestSuite) TestBalanceOutboundNodes() {
err = balancer.meta.ResourceManager.AssignNode(meta.DefaultResourceGroupName, 2) err = balancer.meta.ResourceManager.AssignNode(meta.DefaultResourceGroupName, 2)
suite.NoError(err) suite.NoError(err)
segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, 1) segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, 1)
suite.ElementsMatch(c.expectChannelPlans, channelPlans) assertChannelAssignPlanElementMatch(&suite.Suite, c.expectChannelPlans, channelPlans)
suite.ElementsMatch(c.expectPlans, segmentPlans) assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, segmentPlans)
// clean up distribution for next test // clean up distribution for next test
for node := range c.distributions { for node := range c.distributions {
@ -829,7 +830,7 @@ func (suite *RowCountBasedBalancerTestSuite) TestBalanceOnLoadingCollection() {
} }
segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, 1) segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, 1)
suite.Empty(channelPlans) suite.Empty(channelPlans)
suite.ElementsMatch(c.expectPlans, segmentPlans) assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, segmentPlans)
}) })
} }
} }
@ -924,7 +925,7 @@ func (suite *RowCountBasedBalancerTestSuite) TestDisableBalanceChannel() {
}, },
expectPlans: []SegmentAssignPlan{}, expectPlans: []SegmentAssignPlan{},
expectChannelPlans: []ChannelAssignPlan{ expectChannelPlans: []ChannelAssignPlan{
{Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 2}, From: 2, To: 3, ReplicaID: 1}, {Channel: &meta.DmChannel{VchannelInfo: &datapb.VchannelInfo{CollectionID: 1, ChannelName: "v3"}, Node: 2}, From: 2, To: 3, Replica: newReplicaDefaultRG(1)},
}, },
enableBalanceChannel: true, enableBalanceChannel: true,
}, },
@ -1013,11 +1014,11 @@ func (suite *RowCountBasedBalancerTestSuite) TestDisableBalanceChannel() {
defer Params.Reset(Params.QueryCoordCfg.AutoBalanceChannel.Key) defer Params.Reset(Params.QueryCoordCfg.AutoBalanceChannel.Key)
segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, 1) segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, 1)
if !c.multiple { if !c.multiple {
suite.ElementsMatch(c.expectChannelPlans, channelPlans) assertChannelAssignPlanElementMatch(&suite.Suite, c.expectChannelPlans, channelPlans)
suite.ElementsMatch(c.expectPlans, segmentPlans) assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, segmentPlans)
} else { } else {
suite.Subset(c.expectPlans, segmentPlans) assertChannelAssignPlanElementMatch(&suite.Suite, c.expectChannelPlans, channelPlans, true)
suite.Subset(c.expectChannelPlans, channelPlans) assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, segmentPlans, true)
} }
// clear distribution // clear distribution
@ -1161,3 +1162,101 @@ func (suite *RowCountBasedBalancerTestSuite) TestMultiReplicaBalance() {
func TestRowCountBasedBalancerSuite(t *testing.T) { func TestRowCountBasedBalancerSuite(t *testing.T) {
suite.Run(t, new(RowCountBasedBalancerTestSuite)) suite.Run(t, new(RowCountBasedBalancerTestSuite))
} }
// newReplicaDefaultRG builds a test-only *meta.Replica with the given ID,
// placed in the default resource group and with an empty node set.
func newReplicaDefaultRG(replicaID int64) *meta.Replica {
	pb := &querypb.Replica{
		ID:            replicaID,
		ResourceGroup: meta.DefaultResourceGroupName,
	}
	return meta.NewReplica(pb, typeutil.NewUniqueSet())
}
// remove it after resource group enhancement.
// remove it after resource group enhancement.
//
// assertSegmentAssignPlanElementMatch compares two sets of SegmentAssignPlan,
// reducing each plan to (Segment, ReplicaID, From, To) so that distinct
// *meta.Replica pointers with the same ID compare equal. With no extra
// argument it asserts the two slices contain the same elements; passing
// subset=true asserts that right is a subset of left.
func assertSegmentAssignPlanElementMatch(suite *suite.Suite, left []SegmentAssignPlan, right []SegmentAssignPlan, subset ...bool) {
	type comparablePlan struct {
		Segment   *meta.Segment
		ReplicaID int64
		From      int64
		To        int64
	}

	// Collapse a plan list into comparable values; a nil Replica maps to -1.
	toComparable := func(plans []SegmentAssignPlan) []comparablePlan {
		ret := make([]comparablePlan, 0, len(plans))
		for _, p := range plans {
			replicaID := int64(-1)
			if p.Replica != nil {
				replicaID = p.Replica.GetID()
			}
			ret = append(ret, comparablePlan{
				Segment:   p.Segment,
				ReplicaID: replicaID,
				From:      p.From,
				To:        p.To,
			})
		}
		return ret
	}

	leftPlan := toComparable(left)
	rightPlan := toComparable(right)
	if len(subset) > 0 && subset[0] {
		// Subset mode: right may be shorter than left, so no length check.
		suite.Subset(leftPlan, rightPlan)
	} else {
		// Length check only applies to exact element matching; running it
		// unconditionally would spuriously fail subset-mode callers.
		suite.Equal(len(left), len(right))
		suite.ElementsMatch(leftPlan, rightPlan)
	}
}
// remove it after resource group enhancement.
// remove it after resource group enhancement.
//
// assertChannelAssignPlanElementMatch compares two sets of ChannelAssignPlan,
// reducing each plan to (Channel, ReplicaID, From, To) so that distinct
// *meta.Replica pointers with the same ID compare equal. With no extra
// argument it asserts the two slices contain the same elements; passing
// subset=true asserts that right is a subset of left.
func assertChannelAssignPlanElementMatch(suite *suite.Suite, left []ChannelAssignPlan, right []ChannelAssignPlan, subset ...bool) {
	type comparablePlan struct {
		Channel   *meta.DmChannel
		ReplicaID int64
		From      int64
		To        int64
	}

	// Collapse a plan list into comparable values; a nil Replica maps to -1.
	toComparable := func(plans []ChannelAssignPlan) []comparablePlan {
		ret := make([]comparablePlan, 0, len(plans))
		for _, p := range plans {
			replicaID := int64(-1)
			if p.Replica != nil {
				replicaID = p.Replica.GetID()
			}
			ret = append(ret, comparablePlan{
				Channel:   p.Channel,
				ReplicaID: replicaID,
				From:      p.From,
				To:        p.To,
			})
		}
		return ret
	}

	leftPlan := toComparable(left)
	rightPlan := toComparable(right)
	if len(subset) > 0 && subset[0] {
		suite.Subset(leftPlan, rightPlan)
	} else {
		suite.ElementsMatch(leftPlan, rightPlan)
	}
}

View File

@ -252,7 +252,7 @@ func (b *ScoreBasedBalancer) genStoppingSegmentPlan(replica *meta.Replica, onlin
plans := b.AssignSegment(replica.CollectionID, segments, onlineNodes) plans := b.AssignSegment(replica.CollectionID, segments, onlineNodes)
for i := range plans { for i := range plans {
plans[i].From = nodeID plans[i].From = nodeID
plans[i].ReplicaID = replica.ID plans[i].Replica = replica
} }
segmentPlans = append(segmentPlans, plans...) segmentPlans = append(segmentPlans, plans...)
} }
@ -316,7 +316,7 @@ func (b *ScoreBasedBalancer) genSegmentPlan(replica *meta.Replica, onlineNodes [
segmentPlans := b.AssignSegment(replica.CollectionID, segmentsToMove, onlineNodes) segmentPlans := b.AssignSegment(replica.CollectionID, segmentsToMove, onlineNodes)
for i := range segmentPlans { for i := range segmentPlans {
segmentPlans[i].From = segmentPlans[i].Segment.Node segmentPlans[i].From = segmentPlans[i].Segment.Node
segmentPlans[i].ReplicaID = replica.ID segmentPlans[i].Replica = replica
} }
return segmentPlans return segmentPlans

View File

@ -233,7 +233,7 @@ func (suite *ScoreBasedBalancerTestSuite) TestAssignSegment() {
} }
for i := range c.collectionIDs { for i := range c.collectionIDs {
plans := balancer.AssignSegment(c.collectionIDs[i], c.assignments[i], c.nodes) plans := balancer.AssignSegment(c.collectionIDs[i], c.assignments[i], c.nodes)
suite.ElementsMatch(c.expectPlans[i], plans) assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans[i], plans)
} }
}) })
} }
@ -316,7 +316,7 @@ func (suite *ScoreBasedBalancerTestSuite) TestBalanceOneRound() {
}, },
}, },
expectPlans: []SegmentAssignPlan{ expectPlans: []SegmentAssignPlan{
{Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, Node: 2}, From: 2, To: 1, ReplicaID: 1}, {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, Node: 2}, From: 2, To: 1, Replica: newReplicaDefaultRG(1)},
}, },
expectChannelPlans: []ChannelAssignPlan{}, expectChannelPlans: []ChannelAssignPlan{},
}, },
@ -386,8 +386,8 @@ func (suite *ScoreBasedBalancerTestSuite) TestBalanceOneRound() {
// 4. balance and verify result // 4. balance and verify result
segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, c.collectionID) segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, c.collectionID)
suite.ElementsMatch(c.expectChannelPlans, channelPlans) assertChannelAssignPlanElementMatch(&suite.Suite, c.expectChannelPlans, channelPlans)
suite.ElementsMatch(c.expectPlans, segmentPlans) assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, segmentPlans)
}) })
} }
} }
@ -450,7 +450,7 @@ func (suite *ScoreBasedBalancerTestSuite) TestBalanceMultiRound() {
Segment: &meta.Segment{ Segment: &meta.Segment{
SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 20}, SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 20},
Node: 2, Node: 2,
}, From: 2, To: 3, ReplicaID: 1, }, From: 2, To: 3, Replica: newReplicaDefaultRG(1),
}, },
}, },
{}, {},
@ -498,7 +498,7 @@ func (suite *ScoreBasedBalancerTestSuite) TestBalanceMultiRound() {
// 4. first round balance // 4. first round balance
segmentPlans, _ := suite.getCollectionBalancePlans(balancer, balanceCase.collectionIDs[0]) segmentPlans, _ := suite.getCollectionBalancePlans(balancer, balanceCase.collectionIDs[0])
suite.ElementsMatch(balanceCase.expectPlans[0], segmentPlans) assertSegmentAssignPlanElementMatch(&suite.Suite, balanceCase.expectPlans[0], segmentPlans)
// 5. update segment distribution to simulate balance effect // 5. update segment distribution to simulate balance effect
for node, s := range balanceCase.distributions[1] { for node, s := range balanceCase.distributions[1] {
@ -507,7 +507,7 @@ func (suite *ScoreBasedBalancerTestSuite) TestBalanceMultiRound() {
// 6. balance again // 6. balance again
segmentPlans, _ = suite.getCollectionBalancePlans(balancer, balanceCase.collectionIDs[1]) segmentPlans, _ = suite.getCollectionBalancePlans(balancer, balanceCase.collectionIDs[1])
suite.ElementsMatch(balanceCase.expectPlans[1], segmentPlans) assertSegmentAssignPlanElementMatch(&suite.Suite, balanceCase.expectPlans[1], segmentPlans)
} }
func (suite *ScoreBasedBalancerTestSuite) TestStoppedBalance() { func (suite *ScoreBasedBalancerTestSuite) TestStoppedBalance() {
@ -548,11 +548,11 @@ func (suite *ScoreBasedBalancerTestSuite) TestStoppedBalance() {
{Segment: &meta.Segment{ {Segment: &meta.Segment{
SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20},
Node: 1, Node: 1,
}, From: 1, To: 3, ReplicaID: 1}, }, From: 1, To: 3, Replica: newReplicaDefaultRG(1)},
{Segment: &meta.Segment{ {Segment: &meta.Segment{
SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 10}, SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 10},
Node: 1, Node: 1,
}, From: 1, To: 3, ReplicaID: 1}, }, From: 1, To: 3, Replica: newReplicaDefaultRG(1)},
}, },
expectChannelPlans: []ChannelAssignPlan{}, expectChannelPlans: []ChannelAssignPlan{},
}, },
@ -651,8 +651,8 @@ func (suite *ScoreBasedBalancerTestSuite) TestStoppedBalance() {
// 4. balance and verify result // 4. balance and verify result
segmentPlans, channelPlans := suite.getCollectionBalancePlans(suite.balancer, c.collectionID) segmentPlans, channelPlans := suite.getCollectionBalancePlans(suite.balancer, c.collectionID)
suite.ElementsMatch(c.expectChannelPlans, channelPlans) assertChannelAssignPlanElementMatch(&suite.Suite, c.expectChannelPlans, channelPlans)
suite.ElementsMatch(c.expectPlans, segmentPlans) assertSegmentAssignPlanElementMatch(&suite.Suite, c.expectPlans, segmentPlans)
}) })
} }
} }

View File

@ -51,14 +51,14 @@ func CreateSegmentTasksFromPlans(ctx context.Context, source task.Source, timeou
timeout, timeout,
source, source,
p.Segment.GetCollectionID(), p.Segment.GetCollectionID(),
p.ReplicaID, p.Replica,
actions..., actions...,
) )
if err != nil { if err != nil {
log.Warn("create segment task from plan failed", log.Warn("create segment task from plan failed",
zap.Int64("collection", p.Segment.GetCollectionID()), zap.Int64("collection", p.Segment.GetCollectionID()),
zap.Int64("segmentID", p.Segment.GetID()), zap.Int64("segmentID", p.Segment.GetID()),
zap.Int64("replica", p.ReplicaID), zap.Int64("replica", p.Replica.GetID()),
zap.String("channel", p.Segment.GetInsertChannel()), zap.String("channel", p.Segment.GetInsertChannel()),
zap.Int64("from", p.From), zap.Int64("from", p.From),
zap.Int64("to", p.To), zap.Int64("to", p.To),
@ -70,7 +70,7 @@ func CreateSegmentTasksFromPlans(ctx context.Context, source task.Source, timeou
log.Info("create segment task", log.Info("create segment task",
zap.Int64("collection", p.Segment.GetCollectionID()), zap.Int64("collection", p.Segment.GetCollectionID()),
zap.Int64("segmentID", p.Segment.GetID()), zap.Int64("segmentID", p.Segment.GetID()),
zap.Int64("replica", p.ReplicaID), zap.Int64("replica", p.Replica.GetID()),
zap.String("channel", p.Segment.GetInsertChannel()), zap.String("channel", p.Segment.GetInsertChannel()),
zap.Int64("from", p.From), zap.Int64("from", p.From),
zap.Int64("to", p.To)) zap.Int64("to", p.To))
@ -98,11 +98,11 @@ func CreateChannelTasksFromPlans(ctx context.Context, source task.Source, timeou
action := task.NewChannelAction(p.From, task.ActionTypeReduce, p.Channel.GetChannelName()) action := task.NewChannelAction(p.From, task.ActionTypeReduce, p.Channel.GetChannelName())
actions = append(actions, action) actions = append(actions, action)
} }
t, err := task.NewChannelTask(ctx, timeout, source, p.Channel.GetCollectionID(), p.ReplicaID, actions...) t, err := task.NewChannelTask(ctx, timeout, source, p.Channel.GetCollectionID(), p.Replica, actions...)
if err != nil { if err != nil {
log.Warn("create channel task failed", log.Warn("create channel task failed",
zap.Int64("collection", p.Channel.GetCollectionID()), zap.Int64("collection", p.Channel.GetCollectionID()),
zap.Int64("replica", p.ReplicaID), zap.Int64("replica", p.Replica.GetID()),
zap.String("channel", p.Channel.GetChannelName()), zap.String("channel", p.Channel.GetChannelName()),
zap.Int64("from", p.From), zap.Int64("from", p.From),
zap.Int64("to", p.To), zap.Int64("to", p.To),
@ -113,7 +113,7 @@ func CreateChannelTasksFromPlans(ctx context.Context, source task.Source, timeou
log.Info("create channel task", log.Info("create channel task",
zap.Int64("collection", p.Channel.GetCollectionID()), zap.Int64("collection", p.Channel.GetCollectionID()),
zap.Int64("replica", p.ReplicaID), zap.Int64("replica", p.Replica.GetID()),
zap.String("channel", p.Channel.GetChannelName()), zap.String("channel", p.Channel.GetChannelName()),
zap.Int64("from", p.From), zap.Int64("from", p.From),
zap.Int64("to", p.To)) zap.Int64("to", p.To))

View File

@ -287,7 +287,7 @@ func (suite *BalanceCheckerTestSuite) TestStoppingBalance() {
segPlans, chanPlans := make([]balance.SegmentAssignPlan, 0), make([]balance.ChannelAssignPlan, 0) segPlans, chanPlans := make([]balance.SegmentAssignPlan, 0), make([]balance.ChannelAssignPlan, 0)
mockPlan := balance.SegmentAssignPlan{ mockPlan := balance.SegmentAssignPlan{
Segment: utils.CreateTestSegment(1, 1, 1, 1, 1, "1"), Segment: utils.CreateTestSegment(1, 1, 1, 1, 1, "1"),
ReplicaID: 1, Replica: meta.NilReplica,
From: 1, From: 1,
To: 2, To: 2,
} }

View File

@ -93,7 +93,7 @@ func (c *ChannelChecker) Check(ctx context.Context) []task.Task {
channels := c.dist.ChannelDistManager.GetAll() channels := c.dist.ChannelDistManager.GetAll()
released := utils.FilterReleased(channels, collectionIDs) released := utils.FilterReleased(channels, collectionIDs)
releaseTasks := c.createChannelReduceTasks(ctx, released, -1) releaseTasks := c.createChannelReduceTasks(ctx, released, meta.NilReplica)
task.SetReason("collection released", releaseTasks...) task.SetReason("collection released", releaseTasks...)
tasks = append(tasks, releaseTasks...) tasks = append(tasks, releaseTasks...)
return tasks return tasks
@ -107,12 +107,12 @@ func (c *ChannelChecker) checkReplica(ctx context.Context, replica *meta.Replica
task.SetReason("lacks of channel", tasks...) task.SetReason("lacks of channel", tasks...)
ret = append(ret, tasks...) ret = append(ret, tasks...)
tasks = c.createChannelReduceTasks(c.getTraceCtx(ctx, replica.CollectionID), redundancies, replica.GetID()) tasks = c.createChannelReduceTasks(c.getTraceCtx(ctx, replica.CollectionID), redundancies, replica)
task.SetReason("collection released", tasks...) task.SetReason("collection released", tasks...)
ret = append(ret, tasks...) ret = append(ret, tasks...)
repeated := c.findRepeatedChannels(ctx, replica.GetID()) repeated := c.findRepeatedChannels(ctx, replica.GetID())
tasks = c.createChannelReduceTasks(c.getTraceCtx(ctx, replica.CollectionID), repeated, replica.GetID()) tasks = c.createChannelReduceTasks(c.getTraceCtx(ctx, replica.CollectionID), repeated, replica)
task.SetReason("redundancies of channel", tasks...) task.SetReason("redundancies of channel", tasks...)
ret = append(ret, tasks...) ret = append(ret, tasks...)
@ -224,21 +224,21 @@ func (c *ChannelChecker) createChannelLoadTask(ctx context.Context, channels []*
}) })
plans := c.balancer.AssignChannel(channels, availableNodes) plans := c.balancer.AssignChannel(channels, availableNodes)
for i := range plans { for i := range plans {
plans[i].ReplicaID = replica.GetID() plans[i].Replica = replica
} }
return balance.CreateChannelTasksFromPlans(ctx, c.ID(), Params.QueryCoordCfg.ChannelTaskTimeout.GetAsDuration(time.Millisecond), plans) return balance.CreateChannelTasksFromPlans(ctx, c.ID(), Params.QueryCoordCfg.ChannelTaskTimeout.GetAsDuration(time.Millisecond), plans)
} }
func (c *ChannelChecker) createChannelReduceTasks(ctx context.Context, channels []*meta.DmChannel, replicaID int64) []task.Task { func (c *ChannelChecker) createChannelReduceTasks(ctx context.Context, channels []*meta.DmChannel, replica *meta.Replica) []task.Task {
ret := make([]task.Task, 0, len(channels)) ret := make([]task.Task, 0, len(channels))
for _, ch := range channels { for _, ch := range channels {
action := task.NewChannelAction(ch.Node, task.ActionTypeReduce, ch.GetChannelName()) action := task.NewChannelAction(ch.Node, task.ActionTypeReduce, ch.GetChannelName())
task, err := task.NewChannelTask(ctx, Params.QueryCoordCfg.ChannelTaskTimeout.GetAsDuration(time.Millisecond), c.ID(), ch.GetCollectionID(), replicaID, action) task, err := task.NewChannelTask(ctx, Params.QueryCoordCfg.ChannelTaskTimeout.GetAsDuration(time.Millisecond), c.ID(), ch.GetCollectionID(), replica, action)
if err != nil { if err != nil {
log.Warn("create channel reduce task failed", log.Warn("create channel reduce task failed",
zap.Int64("collection", ch.GetCollectionID()), zap.Int64("collection", ch.GetCollectionID()),
zap.Int64("replica", replicaID), zap.Int64("replica", replica.GetID()),
zap.String("channel", ch.GetChannelName()), zap.String("channel", ch.GetChannelName()),
zap.Int64("from", ch.Node), zap.Int64("from", ch.Node),
zap.Error(err), zap.Error(err),

View File

@ -107,7 +107,7 @@ func (suite *ChannelCheckerTestSuite) createMockBalancer() balance.Balance {
Channel: c, Channel: c,
From: -1, From: -1,
To: nodes[i%len(nodes)], To: nodes[i%len(nodes)],
ReplicaID: -1, Replica: meta.NilReplica,
} }
plans = append(plans, plan) plans = append(plans, plan)
} }

View File

@ -165,7 +165,7 @@ func (c *IndexChecker) createSegmentUpdateTask(ctx context.Context, segment *met
params.Params.QueryCoordCfg.SegmentTaskTimeout.GetAsDuration(time.Millisecond), params.Params.QueryCoordCfg.SegmentTaskTimeout.GetAsDuration(time.Millisecond),
c.ID(), c.ID(),
segment.GetCollectionID(), segment.GetCollectionID(),
replica.GetID(), replica,
action, action,
) )
if err != nil { if err != nil {

View File

@ -102,8 +102,8 @@ func (c *LeaderChecker) Check(ctx context.Context) []task.Task {
leaderViews := c.dist.LeaderViewManager.GetByCollectionAndNode(replica.GetCollectionID(), node) leaderViews := c.dist.LeaderViewManager.GetByCollectionAndNode(replica.GetCollectionID(), node)
for ch, leaderView := range leaderViews { for ch, leaderView := range leaderViews {
dist := c.dist.SegmentDistManager.GetByFilter(meta.WithChannel(ch), meta.WithReplica(replica)) dist := c.dist.SegmentDistManager.GetByFilter(meta.WithChannel(ch), meta.WithReplica(replica))
tasks = append(tasks, c.findNeedLoadedSegments(ctx, replica.ID, leaderView, dist)...) tasks = append(tasks, c.findNeedLoadedSegments(ctx, replica, leaderView, dist)...)
tasks = append(tasks, c.findNeedRemovedSegments(ctx, replica.ID, leaderView, dist)...) tasks = append(tasks, c.findNeedRemovedSegments(ctx, replica, leaderView, dist)...)
} }
} }
} }
@ -112,10 +112,10 @@ func (c *LeaderChecker) Check(ctx context.Context) []task.Task {
return tasks return tasks
} }
func (c *LeaderChecker) findNeedLoadedSegments(ctx context.Context, replica int64, leaderView *meta.LeaderView, dist []*meta.Segment) []task.Task { func (c *LeaderChecker) findNeedLoadedSegments(ctx context.Context, replica *meta.Replica, leaderView *meta.LeaderView, dist []*meta.Segment) []task.Task {
log := log.Ctx(ctx).With( log := log.Ctx(ctx).With(
zap.Int64("collectionID", leaderView.CollectionID), zap.Int64("collectionID", leaderView.CollectionID),
zap.Int64("replica", replica), zap.Int64("replica", replica.GetID()),
zap.String("channel", leaderView.Channel), zap.String("channel", leaderView.Channel),
zap.Int64("leaderViewID", leaderView.ID), zap.Int64("leaderViewID", leaderView.ID),
) )
@ -154,10 +154,10 @@ func (c *LeaderChecker) findNeedLoadedSegments(ctx context.Context, replica int6
return ret return ret
} }
func (c *LeaderChecker) findNeedRemovedSegments(ctx context.Context, replica int64, leaderView *meta.LeaderView, dists []*meta.Segment) []task.Task { func (c *LeaderChecker) findNeedRemovedSegments(ctx context.Context, replica *meta.Replica, leaderView *meta.LeaderView, dists []*meta.Segment) []task.Task {
log := log.Ctx(ctx).With( log := log.Ctx(ctx).With(
zap.Int64("collectionID", leaderView.CollectionID), zap.Int64("collectionID", leaderView.CollectionID),
zap.Int64("replica", replica), zap.Int64("replica", replica.GetID()),
zap.String("channel", leaderView.Channel), zap.String("channel", leaderView.Channel),
zap.Int64("leaderViewID", leaderView.ID), zap.Int64("leaderViewID", leaderView.ID),
) )

View File

@ -97,7 +97,7 @@ func (c *SegmentChecker) Check(ctx context.Context) []task.Task {
// find already released segments which are not contained in target // find already released segments which are not contained in target
segments := c.dist.SegmentDistManager.GetByFilter(nil) segments := c.dist.SegmentDistManager.GetByFilter(nil)
released := utils.FilterReleased(segments, collectionIDs) released := utils.FilterReleased(segments, collectionIDs)
reduceTasks := c.createSegmentReduceTasks(ctx, released, -1, querypb.DataScope_Historical) reduceTasks := c.createSegmentReduceTasks(ctx, released, meta.NilReplica, querypb.DataScope_Historical)
task.SetReason("collection released", reduceTasks...) task.SetReason("collection released", reduceTasks...)
results = append(results, reduceTasks...) results = append(results, reduceTasks...)
task.SetPriority(task.TaskPriorityNormal, results...) task.SetPriority(task.TaskPriorityNormal, results...)
@ -115,20 +115,20 @@ func (c *SegmentChecker) checkReplica(ctx context.Context, replica *meta.Replica
ret = append(ret, tasks...) ret = append(ret, tasks...)
redundancies = c.filterSegmentInUse(replica, redundancies) redundancies = c.filterSegmentInUse(replica, redundancies)
tasks = c.createSegmentReduceTasks(c.getTraceCtx(ctx, replica.CollectionID), redundancies, replica.GetID(), querypb.DataScope_Historical) tasks = c.createSegmentReduceTasks(c.getTraceCtx(ctx, replica.CollectionID), redundancies, replica, querypb.DataScope_Historical)
task.SetReason("segment not exists in target", tasks...) task.SetReason("segment not exists in target", tasks...)
ret = append(ret, tasks...) ret = append(ret, tasks...)
// compare inner dists to find repeated loaded segments // compare inner dists to find repeated loaded segments
redundancies = c.findRepeatedSealedSegments(replica.GetID()) redundancies = c.findRepeatedSealedSegments(replica.GetID())
redundancies = c.filterExistedOnLeader(replica, redundancies) redundancies = c.filterExistedOnLeader(replica, redundancies)
tasks = c.createSegmentReduceTasks(c.getTraceCtx(ctx, replica.CollectionID), redundancies, replica.GetID(), querypb.DataScope_Historical) tasks = c.createSegmentReduceTasks(c.getTraceCtx(ctx, replica.CollectionID), redundancies, replica, querypb.DataScope_Historical)
task.SetReason("redundancies of segment", tasks...) task.SetReason("redundancies of segment", tasks...)
ret = append(ret, tasks...) ret = append(ret, tasks...)
// compare with target to find the lack and redundancy of segments // compare with target to find the lack and redundancy of segments
_, redundancies = c.getGrowingSegmentDiff(replica.GetCollectionID(), replica.GetID()) _, redundancies = c.getGrowingSegmentDiff(replica.GetCollectionID(), replica.GetID())
tasks = c.createSegmentReduceTasks(c.getTraceCtx(ctx, replica.CollectionID), redundancies, replica.GetID(), querypb.DataScope_Streaming) tasks = c.createSegmentReduceTasks(c.getTraceCtx(ctx, replica.CollectionID), redundancies, replica, querypb.DataScope_Streaming)
task.SetReason("streaming segment not exists in target", tasks...) task.SetReason("streaming segment not exists in target", tasks...)
ret = append(ret, tasks...) ret = append(ret, tasks...)
@ -381,7 +381,7 @@ func (c *SegmentChecker) createSegmentLoadTasks(ctx context.Context, segments []
}) })
shardPlans := c.balancer.AssignSegment(replica.CollectionID, segmentInfos, availableNodes) shardPlans := c.balancer.AssignSegment(replica.CollectionID, segmentInfos, availableNodes)
for i := range shardPlans { for i := range shardPlans {
shardPlans[i].ReplicaID = replica.GetID() shardPlans[i].Replica = replica
} }
plans = append(plans, shardPlans...) plans = append(plans, shardPlans...)
} }
@ -389,7 +389,7 @@ func (c *SegmentChecker) createSegmentLoadTasks(ctx context.Context, segments []
return balance.CreateSegmentTasksFromPlans(ctx, c.ID(), Params.QueryCoordCfg.SegmentTaskTimeout.GetAsDuration(time.Millisecond), plans) return balance.CreateSegmentTasksFromPlans(ctx, c.ID(), Params.QueryCoordCfg.SegmentTaskTimeout.GetAsDuration(time.Millisecond), plans)
} }
func (c *SegmentChecker) createSegmentReduceTasks(ctx context.Context, segments []*meta.Segment, replicaID int64, scope querypb.DataScope) []task.Task { func (c *SegmentChecker) createSegmentReduceTasks(ctx context.Context, segments []*meta.Segment, replica *meta.Replica, scope querypb.DataScope) []task.Task {
ret := make([]task.Task, 0, len(segments)) ret := make([]task.Task, 0, len(segments))
for _, s := range segments { for _, s := range segments {
action := task.NewSegmentActionWithScope(s.Node, task.ActionTypeReduce, s.GetInsertChannel(), s.GetID(), scope) action := task.NewSegmentActionWithScope(s.Node, task.ActionTypeReduce, s.GetInsertChannel(), s.GetID(), scope)
@ -398,13 +398,13 @@ func (c *SegmentChecker) createSegmentReduceTasks(ctx context.Context, segments
Params.QueryCoordCfg.SegmentTaskTimeout.GetAsDuration(time.Millisecond), Params.QueryCoordCfg.SegmentTaskTimeout.GetAsDuration(time.Millisecond),
c.ID(), c.ID(),
s.GetCollectionID(), s.GetCollectionID(),
replicaID, replica,
action, action,
) )
if err != nil { if err != nil {
log.Warn("create segment reduce task failed", log.Warn("create segment reduce task failed",
zap.Int64("collection", s.GetCollectionID()), zap.Int64("collection", s.GetCollectionID()),
zap.Int64("replica", replicaID), zap.Int64("replica", replica.GetID()),
zap.String("channel", s.GetInsertChannel()), zap.String("channel", s.GetInsertChannel()),
zap.Int64("from", s.Node), zap.Int64("from", s.Node),
zap.Error(err), zap.Error(err),

View File

@ -94,7 +94,7 @@ func (suite *SegmentCheckerTestSuite) createMockBalancer() balance.Balance {
Segment: s, Segment: s,
From: -1, From: -1,
To: nodes[i%len(nodes)], To: nodes[i%len(nodes)],
ReplicaID: -1, Replica: meta.NilReplica,
} }
plans = append(plans, plan) plans = append(plans, plan)
} }

View File

@ -144,7 +144,7 @@ func (s *Server) balanceSegments(ctx context.Context, req *querypb.LoadBalanceRe
Params.QueryCoordCfg.SegmentTaskTimeout.GetAsDuration(time.Millisecond), Params.QueryCoordCfg.SegmentTaskTimeout.GetAsDuration(time.Millisecond),
task.WrapIDSource(req.GetBase().GetMsgID()), task.WrapIDSource(req.GetBase().GetMsgID()),
req.GetCollectionID(), req.GetCollectionID(),
replica.GetID(), replica,
task.NewSegmentActionWithScope(plan.To, task.ActionTypeGrow, plan.Segment.GetInsertChannel(), plan.Segment.GetID(), querypb.DataScope_Historical), task.NewSegmentActionWithScope(plan.To, task.ActionTypeGrow, plan.Segment.GetInsertChannel(), plan.Segment.GetID(), querypb.DataScope_Historical),
task.NewSegmentActionWithScope(srcNode, task.ActionTypeReduce, plan.Segment.GetInsertChannel(), plan.Segment.GetID(), querypb.DataScope_Historical), task.NewSegmentActionWithScope(srcNode, task.ActionTypeReduce, plan.Segment.GetInsertChannel(), plan.Segment.GetID(), querypb.DataScope_Historical),
) )

View File

@ -30,6 +30,11 @@ import (
"github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/milvus-io/milvus/pkg/util/typeutil"
) )
// NilReplica is used to represent a nil replica.
var NilReplica = NewReplica(&querypb.Replica{
ID: -1,
}, typeutil.NewUniqueSet())
type Replica struct { type Replica struct {
*querypb.Replica *querypb.Replica
nodes typeutil.UniqueSet // a helper field for manipulating replica's Nodes slice field nodes typeutil.UniqueSet // a helper field for manipulating replica's Nodes slice field

View File

@ -351,7 +351,7 @@ func (ob *TargetObserver) shouldUpdateCurrentTarget(ctx context.Context, collect
if updateVersionAction != nil { if updateVersionAction != nil {
actions = append(actions, updateVersionAction) actions = append(actions, updateVersionAction)
} }
if !ob.sync(ctx, replica.GetID(), leaderView, actions) { if !ob.sync(ctx, replica, leaderView, actions) {
return false return false
} }
} }
@ -360,10 +360,11 @@ func (ob *TargetObserver) shouldUpdateCurrentTarget(ctx context.Context, collect
return true return true
} }
func (ob *TargetObserver) sync(ctx context.Context, replicaID int64, leaderView *meta.LeaderView, diffs []*querypb.SyncAction) bool { func (ob *TargetObserver) sync(ctx context.Context, replica *meta.Replica, leaderView *meta.LeaderView, diffs []*querypb.SyncAction) bool {
if len(diffs) == 0 { if len(diffs) == 0 {
return true return true
} }
replicaID := replica.GetID()
log := log.With( log := log.With(
zap.Int64("leaderID", leaderView.ID), zap.Int64("leaderID", leaderView.ID),
@ -402,6 +403,8 @@ func (ob *TargetObserver) sync(ctx context.Context, replicaID int64, leaderView
LoadType: ob.meta.GetLoadType(leaderView.CollectionID), LoadType: ob.meta.GetLoadType(leaderView.CollectionID),
CollectionID: leaderView.CollectionID, CollectionID: leaderView.CollectionID,
PartitionIDs: partitions, PartitionIDs: partitions,
DbName: collectionInfo.GetDbName(),
ResourceGroup: replica.GetResourceGroup(),
}, },
Version: time.Now().UnixNano(), Version: time.Now().UnixNano(),
IndexInfoList: indexInfo, IndexInfoList: indexInfo,

View File

@ -320,6 +320,8 @@ func (ex *Executor) subscribeChannel(task *ChannelTask, step int) error {
loadMeta := packLoadMeta( loadMeta := packLoadMeta(
ex.meta.GetLoadType(task.CollectionID()), ex.meta.GetLoadType(task.CollectionID()),
task.CollectionID(), task.CollectionID(),
collectionInfo.GetDbName(),
task.ResourceGroup(),
partitions..., partitions...,
) )
@ -562,10 +564,13 @@ func (ex *Executor) getMetaInfo(ctx context.Context, task Task) (*milvuspb.Descr
} }
loadMeta := packLoadMeta( loadMeta := packLoadMeta(
ex.meta.GetLoadType(collectionID), ex.meta.GetLoadType(task.CollectionID()),
collectionID, task.CollectionID(),
collectionInfo.GetDbName(),
task.ResourceGroup(),
partitions..., partitions...,
) )
// get channel first, in case of target updated after segment info fetched // get channel first, in case of target updated after segment info fetched
channel := ex.targetMgr.GetDmChannel(collectionID, shard, meta.NextTargetFirst) channel := ex.targetMgr.GetDmChannel(collectionID, shard, meta.NextTargetFirst)
if channel == nil { if channel == nil {

View File

@ -71,7 +71,10 @@ type Task interface {
Source() Source Source() Source
ID() typeutil.UniqueID ID() typeutil.UniqueID
CollectionID() typeutil.UniqueID CollectionID() typeutil.UniqueID
// Return 0 if the task is a reduce task without given replica.
ReplicaID() typeutil.UniqueID ReplicaID() typeutil.UniqueID
// Return "" if the task is a reduce task without given replica.
ResourceGroup() string
Shard() string Shard() string
SetID(id typeutil.UniqueID) SetID(id typeutil.UniqueID)
Status() Status Status() Status
@ -106,7 +109,7 @@ type baseTask struct {
id typeutil.UniqueID // Set by scheduler id typeutil.UniqueID // Set by scheduler
collectionID typeutil.UniqueID collectionID typeutil.UniqueID
replicaID typeutil.UniqueID replica *meta.Replica
shard string shard string
loadType querypb.LoadType loadType querypb.LoadType
@ -125,14 +128,14 @@ type baseTask struct {
startTs time.Time startTs time.Time
} }
func newBaseTask(ctx context.Context, source Source, collectionID, replicaID typeutil.UniqueID, shard string, taskTag string) *baseTask { func newBaseTask(ctx context.Context, source Source, collectionID typeutil.UniqueID, replica *meta.Replica, shard string, taskTag string) *baseTask {
ctx, cancel := context.WithCancel(ctx) ctx, cancel := context.WithCancel(ctx)
ctx, span := otel.Tracer(typeutil.QueryCoordRole).Start(ctx, taskTag) ctx, span := otel.Tracer(typeutil.QueryCoordRole).Start(ctx, taskTag)
return &baseTask{ return &baseTask{
source: source, source: source,
collectionID: collectionID, collectionID: collectionID,
replicaID: replicaID, replica: replica,
shard: shard, shard: shard,
status: atomic.NewString(TaskStatusStarted), status: atomic.NewString(TaskStatusStarted),
@ -167,7 +170,13 @@ func (task *baseTask) CollectionID() typeutil.UniqueID {
} }
func (task *baseTask) ReplicaID() typeutil.UniqueID { func (task *baseTask) ReplicaID() typeutil.UniqueID {
return task.replicaID // replica may be nil, 0 will be generated.
return task.replica.GetID()
}
func (task *baseTask) ResourceGroup() string {
// replica may be nil, empty string will be generated.
return task.replica.GetResourceGroup()
} }
func (task *baseTask) Shard() string { func (task *baseTask) Shard() string {
@ -195,7 +204,7 @@ func (task *baseTask) SetPriority(priority Priority) {
} }
func (task *baseTask) Index() string { func (task *baseTask) Index() string {
return fmt.Sprintf("[replica=%d]", task.replicaID) return fmt.Sprintf("[replica=%d]", task.ReplicaID())
} }
func (task *baseTask) RecordStartTs() { func (task *baseTask) RecordStartTs() {
@ -282,13 +291,14 @@ func (task *baseTask) String() string {
} }
} }
return fmt.Sprintf( return fmt.Sprintf(
"[id=%d] [type=%s] [source=%s] [reason=%s] [collectionID=%d] [replicaID=%d] [priority=%s] [actionsCount=%d] [actions=%s]", "[id=%d] [type=%s] [source=%s] [reason=%s] [collectionID=%d] [replicaID=%d] [resourceGroup=%s] [priority=%s] [actionsCount=%d] [actions=%s]",
task.id, task.id,
GetTaskType(task).String(), GetTaskType(task).String(),
task.source.String(), task.source.String(),
task.reason, task.reason,
task.collectionID, task.collectionID,
task.replicaID, task.ReplicaID(),
task.ResourceGroup(),
task.priority.String(), task.priority.String(),
len(task.actions), len(task.actions),
actionsStr, actionsStr,
@ -307,8 +317,8 @@ type SegmentTask struct {
func NewSegmentTask(ctx context.Context, func NewSegmentTask(ctx context.Context,
timeout time.Duration, timeout time.Duration,
source Source, source Source,
collectionID, collectionID typeutil.UniqueID,
replicaID typeutil.UniqueID, replica *meta.Replica,
actions ...Action, actions ...Action,
) (*SegmentTask, error) { ) (*SegmentTask, error) {
if len(actions) == 0 { if len(actions) == 0 {
@ -330,7 +340,7 @@ func NewSegmentTask(ctx context.Context,
} }
} }
base := newBaseTask(ctx, source, collectionID, replicaID, shard, fmt.Sprintf("SegmentTask-%s-%d", actions[0].Type().String(), segmentID)) base := newBaseTask(ctx, source, collectionID, replica, shard, fmt.Sprintf("SegmentTask-%s-%d", actions[0].Type().String(), segmentID))
base.actions = actions base.actions = actions
return &SegmentTask{ return &SegmentTask{
baseTask: base, baseTask: base,
@ -360,8 +370,8 @@ type ChannelTask struct {
func NewChannelTask(ctx context.Context, func NewChannelTask(ctx context.Context,
timeout time.Duration, timeout time.Duration,
source Source, source Source,
collectionID, collectionID typeutil.UniqueID,
replicaID typeutil.UniqueID, replica *meta.Replica,
actions ...Action, actions ...Action,
) (*ChannelTask, error) { ) (*ChannelTask, error) {
if len(actions) == 0 { if len(actions) == 0 {
@ -381,7 +391,7 @@ func NewChannelTask(ctx context.Context,
} }
} }
base := newBaseTask(ctx, source, collectionID, replicaID, channel, fmt.Sprintf("ChannelTask-%s-%s", actions[0].Type().String(), channel)) base := newBaseTask(ctx, source, collectionID, replica, channel, fmt.Sprintf("ChannelTask-%s-%s", actions[0].Type().String(), channel))
base.actions = actions base.actions = actions
return &ChannelTask{ return &ChannelTask{
baseTask: base, baseTask: base,
@ -410,13 +420,13 @@ type LeaderTask struct {
func NewLeaderTask(ctx context.Context, func NewLeaderTask(ctx context.Context,
timeout time.Duration, timeout time.Duration,
source Source, source Source,
collectionID, collectionID typeutil.UniqueID,
replicaID typeutil.UniqueID, replica *meta.Replica,
leaderID int64, leaderID int64,
action *LeaderAction, action *LeaderAction,
) *LeaderTask { ) *LeaderTask {
segmentID := action.SegmentID() segmentID := action.SegmentID()
base := newBaseTask(ctx, source, collectionID, replicaID, action.Shard(), fmt.Sprintf("LeaderTask-%s-%d", action.Type().String(), segmentID)) base := newBaseTask(ctx, source, collectionID, replica, action.Shard(), fmt.Sprintf("LeaderTask-%s-%d", action.Type().String(), segmentID))
base.actions = []Action{action} base.actions = []Action{action}
return &LeaderTask{ return &LeaderTask{
baseTask: base, baseTask: base,

View File

@ -59,7 +59,7 @@ type TaskSuite struct {
// Data // Data
collection int64 collection int64
replica int64 replica *meta.Replica
subChannels []string subChannels []string
unsubChannels []string unsubChannels []string
moveChannels []string moveChannels []string
@ -86,7 +86,7 @@ type TaskSuite struct {
func (suite *TaskSuite) SetupSuite() { func (suite *TaskSuite) SetupSuite() {
paramtable.Init() paramtable.Init()
suite.collection = 1000 suite.collection = 1000
suite.replica = 10 suite.replica = newReplicaDefaultRG(10)
suite.subChannels = []string{ suite.subChannels = []string{
"sub-0", "sub-0",
"sub-1", "sub-1",
@ -191,8 +191,7 @@ func (suite *TaskSuite) BeforeTest(suiteName, testName string) {
PartitionID: 1, PartitionID: 1,
}, },
}) })
suite.meta.ReplicaManager.Put( suite.meta.ReplicaManager.Put(utils.CreateTestReplica(suite.replica.GetID(), suite.collection, []int64{1, 2, 3}))
utils.CreateTestReplica(suite.replica, suite.collection, []int64{1, 2, 3}))
} }
} }
@ -349,7 +348,7 @@ func (suite *TaskSuite) TestUnsubscribeChannelTask() {
timeout, timeout,
WrapIDSource(0), WrapIDSource(0),
suite.collection, suite.collection,
-1, meta.NilReplica,
NewChannelAction(targetNode, ActionTypeReduce, channel), NewChannelAction(targetNode, ActionTypeReduce, channel),
) )
@ -1343,39 +1342,39 @@ func (suite *TaskSuite) TestLeaderTaskSet() {
} }
func (suite *TaskSuite) TestCreateTaskBehavior() { func (suite *TaskSuite) TestCreateTaskBehavior() {
chanelTask, err := NewChannelTask(context.TODO(), 5*time.Second, WrapIDSource(0), 0, 0) chanelTask, err := NewChannelTask(context.TODO(), 5*time.Second, WrapIDSource(0), 0, meta.NilReplica)
suite.ErrorIs(err, merr.ErrParameterInvalid) suite.ErrorIs(err, merr.ErrParameterInvalid)
suite.Nil(chanelTask) suite.Nil(chanelTask)
action := NewSegmentAction(0, 0, "", 0) action := NewSegmentAction(0, 0, "", 0)
chanelTask, err = NewChannelTask(context.TODO(), 5*time.Second, WrapIDSource(0), 0, 0, action) chanelTask, err = NewChannelTask(context.TODO(), 5*time.Second, WrapIDSource(0), 0, meta.NilReplica, action)
suite.ErrorIs(err, merr.ErrParameterInvalid) suite.ErrorIs(err, merr.ErrParameterInvalid)
suite.Nil(chanelTask) suite.Nil(chanelTask)
action1 := NewChannelAction(0, 0, "fake-channel1") action1 := NewChannelAction(0, 0, "fake-channel1")
action2 := NewChannelAction(0, 0, "fake-channel2") action2 := NewChannelAction(0, 0, "fake-channel2")
chanelTask, err = NewChannelTask(context.TODO(), 5*time.Second, WrapIDSource(0), 0, 0, action1, action2) chanelTask, err = NewChannelTask(context.TODO(), 5*time.Second, WrapIDSource(0), 0, meta.NilReplica, action1, action2)
suite.ErrorIs(err, merr.ErrParameterInvalid) suite.ErrorIs(err, merr.ErrParameterInvalid)
suite.Nil(chanelTask) suite.Nil(chanelTask)
segmentTask, err := NewSegmentTask(context.TODO(), 5*time.Second, WrapIDSource(0), 0, 0) segmentTask, err := NewSegmentTask(context.TODO(), 5*time.Second, WrapIDSource(0), 0, meta.NilReplica)
suite.ErrorIs(err, merr.ErrParameterInvalid) suite.ErrorIs(err, merr.ErrParameterInvalid)
suite.Nil(segmentTask) suite.Nil(segmentTask)
channelAction := NewChannelAction(0, 0, "fake-channel1") channelAction := NewChannelAction(0, 0, "fake-channel1")
segmentTask, err = NewSegmentTask(context.TODO(), 5*time.Second, WrapIDSource(0), 0, 0, channelAction) segmentTask, err = NewSegmentTask(context.TODO(), 5*time.Second, WrapIDSource(0), 0, meta.NilReplica, channelAction)
suite.ErrorIs(err, merr.ErrParameterInvalid) suite.ErrorIs(err, merr.ErrParameterInvalid)
suite.Nil(segmentTask) suite.Nil(segmentTask)
segmentAction1 := NewSegmentAction(0, 0, "", 0) segmentAction1 := NewSegmentAction(0, 0, "", 0)
segmentAction2 := NewSegmentAction(0, 0, "", 1) segmentAction2 := NewSegmentAction(0, 0, "", 1)
segmentTask, err = NewSegmentTask(context.TODO(), 5*time.Second, WrapIDSource(0), 0, 0, segmentAction1, segmentAction2) segmentTask, err = NewSegmentTask(context.TODO(), 5*time.Second, WrapIDSource(0), 0, meta.NilReplica, segmentAction1, segmentAction2)
suite.ErrorIs(err, merr.ErrParameterInvalid) suite.ErrorIs(err, merr.ErrParameterInvalid)
suite.Nil(segmentTask) suite.Nil(segmentTask)
leaderAction := NewLeaderAction(1, 2, ActionTypeGrow, "fake-channel1", 100, 0) leaderAction := NewLeaderAction(1, 2, ActionTypeGrow, "fake-channel1", 100, 0)
leaderTask := NewLeaderTask(context.TODO(), 5*time.Second, WrapIDSource(0), 0, 0, 1, leaderAction) leaderTask := NewLeaderTask(context.TODO(), 5*time.Second, WrapIDSource(0), 0, meta.NilReplica, 1, leaderAction)
suite.NotNil(leaderTask) suite.NotNil(leaderTask)
} }
@ -1447,8 +1446,7 @@ func (suite *TaskSuite) TestNoExecutor() {
ChannelName: Params.CommonCfg.RootCoordDml.GetValue() + "-test", ChannelName: Params.CommonCfg.RootCoordDml.GetValue() + "-test",
} }
suite.meta.ReplicaManager.Put( suite.meta.ReplicaManager.Put(utils.CreateTestReplica(suite.replica.GetID(), suite.collection, []int64{1, 2, 3, -1}))
utils.CreateTestReplica(suite.replica, suite.collection, []int64{1, 2, 3, -1}))
// Test load segment task // Test load segment task
suite.dist.ChannelDistManager.Update(targetNode, meta.DmChannelFromVChannel(&datapb.VchannelInfo{ suite.dist.ChannelDistManager.Update(targetNode, meta.DmChannelFromVChannel(&datapb.VchannelInfo{
@ -1666,7 +1664,7 @@ func (suite *TaskSuite) TestBalanceChannelTask() {
10*time.Second, 10*time.Second,
WrapIDSource(2), WrapIDSource(2),
collectionID, collectionID,
1, meta.NilReplica,
NewChannelAction(1, ActionTypeGrow, channel), NewChannelAction(1, ActionTypeGrow, channel),
NewChannelAction(2, ActionTypeReduce, channel), NewChannelAction(2, ActionTypeReduce, channel),
) )
@ -1761,7 +1759,12 @@ func (suite *TaskSuite) TestBalanceChannelWithL0SegmentTask() {
10*time.Second, 10*time.Second,
WrapIDSource(2), WrapIDSource(2),
collectionID, collectionID,
1, meta.NewReplica(
&querypb.Replica{
ID: 1,
},
typeutil.NewUniqueSet(),
),
NewChannelAction(1, ActionTypeGrow, channel), NewChannelAction(1, ActionTypeGrow, channel),
NewChannelAction(2, ActionTypeReduce, channel), NewChannelAction(2, ActionTypeReduce, channel),
) )
@ -1795,3 +1798,13 @@ func (suite *TaskSuite) TestBalanceChannelWithL0SegmentTask() {
func TestTask(t *testing.T) { func TestTask(t *testing.T) {
suite.Run(t, new(TaskSuite)) suite.Run(t, new(TaskSuite))
} }
func newReplicaDefaultRG(replicaID int64) *meta.Replica {
return meta.NewReplica(
&querypb.Replica{
ID: replicaID,
ResourceGroup: meta.DefaultResourceGroupName,
},
typeutil.NewUniqueSet(),
)
}

View File

@ -180,11 +180,13 @@ func packReleaseSegmentRequest(task *SegmentTask, action *SegmentAction) *queryp
} }
} }
func packLoadMeta(loadType querypb.LoadType, collectionID int64, partitions ...int64) *querypb.LoadMetaInfo { func packLoadMeta(loadType querypb.LoadType, collectionID int64, databaseName string, resourceGroup string, partitions ...int64) *querypb.LoadMetaInfo {
return &querypb.LoadMetaInfo{ return &querypb.LoadMetaInfo{
LoadType: loadType, LoadType: loadType,
CollectionID: collectionID, CollectionID: collectionID,
PartitionIDs: partitions, PartitionIDs: partitions,
DbName: databaseName,
ResourceGroup: resourceGroup,
} }
} }

View File

@ -43,7 +43,7 @@ func (s *UtilsSuite) TestPackLoadSegmentRequest() {
time.Second, time.Second,
nil, nil,
1, 1,
10, newReplicaDefaultRG(10),
action, action,
) )
s.NoError(err) s.NoError(err)
@ -96,7 +96,7 @@ func (s *UtilsSuite) TestPackLoadSegmentRequestMmap() {
time.Second, time.Second,
nil, nil,
1, 1,
10, newReplicaDefaultRG(10),
action, action,
) )
s.NoError(err) s.NoError(err)

View File

@ -94,7 +94,9 @@ func (suite *LocalWorkerTestSuite) BeforeTest(suiteName, testName string) {
suite.schema = segments.GenTestCollectionSchema(suite.collectionName, schemapb.DataType_Int64, true) suite.schema = segments.GenTestCollectionSchema(suite.collectionName, schemapb.DataType_Int64, true)
suite.indexMeta = segments.GenTestIndexMeta(suite.collectionID, suite.schema) suite.indexMeta = segments.GenTestIndexMeta(suite.collectionID, suite.schema)
collection := segments.NewCollection(suite.collectionID, suite.schema, suite.indexMeta, querypb.LoadType_LoadCollection) collection := segments.NewCollection(suite.collectionID, suite.schema, suite.indexMeta, &querypb.LoadMetaInfo{
LoadType: querypb.LoadType_LoadCollection,
})
loadMata := &querypb.LoadMetaInfo{ loadMata := &querypb.LoadMetaInfo{
LoadType: querypb.LoadType_LoadCollection, LoadType: querypb.LoadType_LoadCollection,
CollectionID: suite.collectionID, CollectionID: suite.collectionID,

View File

@ -61,7 +61,9 @@ func (suite *InsertNodeSuite) TestBasic() {
schema := segments.GenTestCollectionSchema(suite.collectionName, schemapb.DataType_Int64, true) schema := segments.GenTestCollectionSchema(suite.collectionName, schemapb.DataType_Int64, true)
in := suite.buildInsertNodeMsg(schema) in := suite.buildInsertNodeMsg(schema)
collection := segments.NewCollection(suite.collectionID, schema, segments.GenTestIndexMeta(suite.collectionID, schema), querypb.LoadType_LoadCollection) collection := segments.NewCollection(suite.collectionID, schema, segments.GenTestIndexMeta(suite.collectionID, schema), &querypb.LoadMetaInfo{
LoadType: querypb.LoadType_LoadCollection,
})
collection.AddPartition(suite.partitionID) collection.AddPartition(suite.partitionID)
// init mock // init mock
@ -95,7 +97,9 @@ func (suite *InsertNodeSuite) TestDataTypeNotSupported() {
schema := segments.GenTestCollectionSchema(suite.collectionName, schemapb.DataType_Int64, true) schema := segments.GenTestCollectionSchema(suite.collectionName, schemapb.DataType_Int64, true)
in := suite.buildInsertNodeMsg(schema) in := suite.buildInsertNodeMsg(schema)
collection := segments.NewCollection(suite.collectionID, schema, segments.GenTestIndexMeta(suite.collectionID, schema), querypb.LoadType_LoadCollection) collection := segments.NewCollection(suite.collectionID, schema, segments.GenTestIndexMeta(suite.collectionID, schema), &querypb.LoadMetaInfo{
LoadType: querypb.LoadType_LoadCollection,
})
collection.AddPartition(suite.partitionID) collection.AddPartition(suite.partitionID)
// init mock // init mock

View File

@ -109,7 +109,9 @@ func (suite *PipelineTestSuite) TestBasic() {
// init mock // init mock
// mock collection manager // mock collection manager
schema := segments.GenTestCollectionSchema(suite.collectionName, schemapb.DataType_Int64, true) schema := segments.GenTestCollectionSchema(suite.collectionName, schemapb.DataType_Int64, true)
collection := segments.NewCollection(suite.collectionID, schema, segments.GenTestIndexMeta(suite.collectionID, schema), querypb.LoadType_LoadCollection) collection := segments.NewCollection(suite.collectionID, schema, segments.GenTestIndexMeta(suite.collectionID, schema), &querypb.LoadMetaInfo{
LoadType: querypb.LoadType_LoadCollection,
})
suite.collectionManager.EXPECT().Get(suite.collectionID).Return(collection) suite.collectionManager.EXPECT().Get(suite.collectionID).Return(collection)
// mock mq factory // mock mq factory

View File

@ -93,8 +93,7 @@ func (m *collectionManager) PutOrRef(collectionID int64, schema *schemapb.Collec
return return
} }
collection := NewCollection(collectionID, schema, meta, loadMeta.GetLoadType()) collection := NewCollection(collectionID, schema, meta, loadMeta)
collection.AddPartition(loadMeta.GetPartitionIDs()...)
collection.Ref(1) collection.Ref(1)
m.collections[collectionID] = collection m.collections[collectionID] = collection
} }
@ -139,12 +138,19 @@ func (m *collectionManager) Unref(collectionID int64, count uint32) bool {
} }
// Collection is a wrapper of the underlying C-structure C.CCollection // Collection is a wrapper of the underlying C-structure C.CCollection
// In a query node, `Collection` is a replica info of a collection in these query node.
type Collection struct { type Collection struct {
mu sync.RWMutex // protects colllectionPtr mu sync.RWMutex // protects colllectionPtr
collectionPtr C.CCollection collectionPtr C.CCollection
id int64 id int64
partitions *typeutil.ConcurrentSet[int64] partitions *typeutil.ConcurrentSet[int64]
loadType querypb.LoadType loadType querypb.LoadType
dbName string
resourceGroup string
// resource group of node may be changed if node transfer,
// but Collection in Manager will be released before assign new replica of new resource group on these node.
// so we don't need to update resource group in Collection.
// if resource group is not updated, the reference count of collection manager works failed.
metricType atomic.String // deprecated metricType atomic.String // deprecated
schema atomic.Pointer[schemapb.CollectionSchema] schema atomic.Pointer[schemapb.CollectionSchema]
isGpuIndex bool isGpuIndex bool
@ -152,6 +158,16 @@ type Collection struct {
refCount *atomic.Uint32 refCount *atomic.Uint32
} }
// GetDBName returns the database name of collection.
func (c *Collection) GetDBName() string {
return c.dbName
}
// GetResourceGroup returns the resource group of collection.
func (c *Collection) GetResourceGroup() string {
return c.resourceGroup
}
// ID returns collection id // ID returns collection id
func (c *Collection) ID() int64 { func (c *Collection) ID() int64 {
return c.id return c.id
@ -214,7 +230,7 @@ func (c *Collection) Unref(count uint32) uint32 {
} }
// newCollection returns a new Collection // newCollection returns a new Collection
func NewCollection(collectionID int64, schema *schemapb.CollectionSchema, indexMeta *segcorepb.CollectionIndexMeta, loadType querypb.LoadType) *Collection { func NewCollection(collectionID int64, schema *schemapb.CollectionSchema, indexMeta *segcorepb.CollectionIndexMeta, loadMetaInfo *querypb.LoadMetaInfo) *Collection {
/* /*
CCollection CCollection
NewCollection(const char* schema_proto_blob); NewCollection(const char* schema_proto_blob);
@ -250,10 +266,15 @@ func NewCollection(collectionID int64, schema *schemapb.CollectionSchema, indexM
collectionPtr: collection, collectionPtr: collection,
id: collectionID, id: collectionID,
partitions: typeutil.NewConcurrentSet[int64](), partitions: typeutil.NewConcurrentSet[int64](),
loadType: loadType, loadType: loadMetaInfo.GetLoadType(),
dbName: loadMetaInfo.GetDbName(),
resourceGroup: loadMetaInfo.GetResourceGroup(),
refCount: atomic.NewUint32(0), refCount: atomic.NewUint32(0),
isGpuIndex: isGpuIndex, isGpuIndex: isGpuIndex,
} }
for _, partitionID := range loadMetaInfo.GetPartitionIDs() {
coll.partitions.Insert(partitionID)
}
coll.schema.Store(schema) coll.schema.Store(schema)
return coll return coll

View File

@ -46,7 +46,9 @@ func (s *ManagerSuite) SetupTest() {
schema := GenTestCollectionSchema("manager-suite", schemapb.DataType_Int64, true) schema := GenTestCollectionSchema("manager-suite", schemapb.DataType_Int64, true)
segment, err := NewSegment( segment, err := NewSegment(
context.Background(), context.Background(),
NewCollection(s.collectionIDs[i], schema, GenTestIndexMeta(s.collectionIDs[i], schema), querypb.LoadType_LoadCollection), NewCollection(s.collectionIDs[i], schema, GenTestIndexMeta(s.collectionIDs[i], schema), &querypb.LoadMetaInfo{
LoadType: querypb.LoadType_LoadCollection,
}),
s.types[i], s.types[i],
0, 0,
&querypb.SegmentLoadInfo{ &querypb.SegmentLoadInfo{

View File

@ -44,7 +44,9 @@ func (suite *PlanSuite) SetupTest() {
suite.partitionID = 10 suite.partitionID = 10
suite.segmentID = 1 suite.segmentID = 1
schema := GenTestCollectionSchema("plan-suite", schemapb.DataType_Int64, true) schema := GenTestCollectionSchema("plan-suite", schemapb.DataType_Int64, true)
suite.collection = NewCollection(suite.collectionID, schema, GenTestIndexMeta(suite.collectionID, schema), querypb.LoadType_LoadCollection) suite.collection = NewCollection(suite.collectionID, schema, GenTestIndexMeta(suite.collectionID, schema), &querypb.LoadMetaInfo{
LoadType: querypb.LoadType_LoadCollection,
})
suite.collection.AddPartition(suite.partitionID) suite.collection.AddPartition(suite.partitionID)
} }

View File

@ -71,8 +71,9 @@ func (suite *ReduceSuite) SetupTest() {
suite.collection = NewCollection(suite.collectionID, suite.collection = NewCollection(suite.collectionID,
schema, schema,
GenTestIndexMeta(suite.collectionID, schema), GenTestIndexMeta(suite.collectionID, schema),
querypb.LoadType_LoadCollection, &querypb.LoadMetaInfo{
) LoadType: querypb.LoadType_LoadCollection,
})
suite.segment, err = NewSegment(ctx, suite.segment, err = NewSegment(ctx,
suite.collection, suite.collection,
SegmentTypeSealed, SegmentTypeSealed,

View File

@ -693,7 +693,7 @@ func (suite *SegmentLoaderDetailSuite) SetupTest() {
PartitionIDs: []int64{suite.partitionID}, PartitionIDs: []int64{suite.partitionID},
} }
collection := NewCollection(suite.collectionID, schema, indexMeta, loadMeta.GetLoadType()) collection := NewCollection(suite.collectionID, schema, indexMeta, loadMeta)
suite.collectionManager.EXPECT().Get(suite.collectionID).Return(collection).Maybe() suite.collectionManager.EXPECT().Get(suite.collectionID).Return(collection).Maybe()
} }

View File

@ -219,7 +219,9 @@ func (suite *QueryNodeSuite) TestStop() {
suite.node.manager = segments.NewManager() suite.node.manager = segments.NewManager()
schema := segments.GenTestCollectionSchema("test_stop", schemapb.DataType_Int64, true) schema := segments.GenTestCollectionSchema("test_stop", schemapb.DataType_Int64, true)
collection := segments.NewCollection(1, schema, nil, querypb.LoadType_LoadCollection) collection := segments.NewCollection(1, schema, nil, &querypb.LoadMetaInfo{
LoadType: querypb.LoadType_LoadCollection,
})
segment, err := segments.NewSegment( segment, err := segments.NewSegment(
context.Background(), context.Background(),
collection, collection,