milvus/pkg/mq/msgstream/mq_factory_test.go
yihao.dai b18ebd9468
enhance: Remove legacy cdc/replication (#46603)
issue: https://github.com/milvus-io/milvus/issues/44123

<!-- This is an auto-generated comment: release notes by coderabbit.ai
-->
- Core invariant: legacy in-cluster CDC/replication plumbing
(ReplicateMsg types, ReplicateID-based guards and flags) is obsolete —
the system relies on standard msgstream positions, subPos/end-ts
semantics and timetick ordering as the single source of truth for
message ordering and skipping, so replication-specific
channels/types/guards can be removed safely.

- Removed/simplified logic (what and why): removed replication feature
flags and params (ReplicateMsgChannel, TTMsgEnabled,
CollectionReplicateEnable), ReplicateMsg type and its tests, ReplicateID
constants/helpers and MergeProperties hooks, ReplicateConfig and its
propagation (streamPipeline, StreamConfig, dispatcher, target),
replicate-aware dispatcher/pipeline branches, and replicate-mode
pre-checks/timestamp-allocation in proxy tasks — these implemented a
redundant alternate “replicate-mode” pathway that duplicated
position/end-ts and timetick logic.

- Why this does NOT cause data loss or regression (concrete code paths):
no persistence or core write paths were removed — proxy PreExecute flows
(internal/proxy/task_*.go) still perform the same schema/ID/size
validations and then follow the normal non-replicate execution path;
dispatcher and pipeline continue to use position/subPos and
pullback/end-ts in Seek/grouping (pkg/mq/msgdispatcher/dispatcher.go,
internal/util/pipeline/stream_pipeline.go), so skipping and ordering
behavior remains unchanged; timetick emission in rootcoord
(sendMinDdlTsAsTt) is now ungated (no silent suppression), preserving or
increasing timetick delivery rather than removing it.

- PR type and net effect: Enhancement/Refactor — removes deprecated
replication API surface (types, helpers, config, tests) and replication
branches, simplifies public APIs and constructor signatures, and reduces
surface area for future maintenance while keeping DML/DDL persistence,
ordering, and seek semantics intact.
<!-- end of auto-generated comment: release notes by coderabbit.ai -->

---------

Signed-off-by: bigsheeper <yihao.dai@zilliz.com>
2025-12-30 14:53:21 +08:00

229 lines
5.8 KiB
Go

// Licensed to the LF AI & Data foundation under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package msgstream
import (
"context"
"os"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/milvus-io/milvus/pkg/v2/mq/common"
"github.com/milvus-io/milvus/pkg/v2/util/paramtable"
)
// TestPmsFactory checks the Pulsar msgstream factory: the stream disposer
// succeeds, and NewMsgStream/NewTtMsgStream honor context deadlines — an
// already-expired context must yield an error and a nil stream.
func TestPmsFactory(t *testing.T) {
	factory := NewPmsFactory(&Params.ServiceParam)

	err := factory.NewMsgStreamDisposer(context.Background())([]string{"hello"}, "xx")
	assert.NoError(t, err)

	cases := []struct {
		description   string
		withTimeout   bool
		ctxTimeouted  bool
		expectedError bool
	}{
		{"normal ctx", false, false, false},
		{"timeout ctx not timeout", true, false, false},
		{"timeout ctx timeout", true, true, true},
	}

	for _, tc := range cases {
		t.Run(tc.description, func(t *testing.T) {
			ctx := context.Background()
			if tc.withTimeout {
				// An already-passed deadline simulates a timed-out context;
				// a generous future deadline should not interfere.
				deadline := time.Now().Add(10 * time.Second)
				if tc.ctxTimeouted {
					deadline = time.Now().Add(-time.Minute)
				}
				var cancel context.CancelFunc
				ctx, cancel = context.WithDeadline(ctx, deadline)
				defer cancel()
			}

			// Both stream flavors share the same success/failure expectations.
			check := func(stream MsgStream, err error) {
				if tc.expectedError {
					assert.Error(t, err)
					assert.Nil(t, stream)
					return
				}
				assert.NoError(t, err)
				assert.NotNil(t, stream)
			}
			check(factory.NewMsgStream(ctx))
			check(factory.NewTtMsgStream(ctx))
		})
	}
}
// TestPmsFactoryWithAuth exercises Pulsar auth configuration: with an auth
// plugin and matching params both stream flavors are created, while a
// configured plugin with empty params makes stream creation fail.
func TestPmsFactoryWithAuth(t *testing.T) {
	Params.Save(Params.PulsarCfg.AuthPlugin.Key, "token")
	Params.Save(Params.PulsarCfg.AuthParams.Key, "token:fake_token")
	defer func() {
		// Restore defaults so later tests are not affected.
		Params.Save(Params.PulsarCfg.AuthPlugin.Key, "")
		Params.Save(Params.PulsarCfg.AuthParams.Key, "")
	}()

	ctx := context.Background()

	// Plugin and params both present: creation succeeds.
	factory := NewPmsFactory(&Params.ServiceParam)
	_, err := factory.NewMsgStream(ctx)
	assert.NoError(t, err)
	_, err = factory.NewTtMsgStream(ctx)
	assert.NoError(t, err)

	// Plugin still set but params cleared: creation fails.
	Params.Save(Params.PulsarCfg.AuthParams.Key, "")
	factory = NewPmsFactory(&Params.ServiceParam)
	_, err = factory.NewMsgStream(ctx)
	assert.Error(t, err)
	_, err = factory.NewTtMsgStream(ctx)
	assert.Error(t, err)
}
// TestKafkaFactory checks the Kafka msgstream factory: NewMsgStream and
// NewTtMsgStream honor context deadlines — an already-expired context must
// yield an error and a nil stream.
func TestKafkaFactory(t *testing.T) {
	factory := NewKmsFactory(&Params.ServiceParam)

	cases := []struct {
		description   string
		withTimeout   bool
		ctxTimeouted  bool
		expectedError bool
	}{
		{"normal ctx", false, false, false},
		{"timeout ctx not timeout", true, false, false},
		{"timeout ctx timeout", true, true, true},
	}

	for _, tc := range cases {
		t.Run(tc.description, func(t *testing.T) {
			ctx := context.Background()
			if tc.withTimeout {
				// An already-passed deadline simulates a timed-out context;
				// a generous future deadline should not interfere.
				deadline := time.Now().Add(10 * time.Second)
				if tc.ctxTimeouted {
					deadline = time.Now().Add(-time.Minute)
				}
				var cancel context.CancelFunc
				ctx, cancel = context.WithDeadline(ctx, deadline)
				defer cancel()
			}

			// Both stream flavors share the same success/failure expectations.
			check := func(stream MsgStream, err error) {
				if tc.expectedError {
					assert.Error(t, err)
					assert.Nil(t, stream)
					return
				}
				assert.NoError(t, err)
				assert.NotNil(t, stream)
			}
			check(factory.NewMsgStream(ctx))
			check(factory.NewTtMsgStream(ctx))
		})
	}
}
// TestRmsFactory verifies the RocksMQ-backed factory can create both a plain
// msgstream and a timetick msgstream rooted at a temporary directory.
func TestRmsFactory(t *testing.T) {
	// NOTE(review): nothing in this test sets ROCKSMQ_PATH; the Unsetenv
	// presumably guards against the factory exporting it as a side effect —
	// confirm against NewRocksmqFactory.
	defer os.Unsetenv("ROCKSMQ_PATH")
	paramtable.Init()

	factory := NewRocksmqFactory(t.TempDir(), &paramtable.Get().ServiceParam)

	ctx := context.Background()
	_, err := factory.NewMsgStream(ctx)
	assert.NoError(t, err)
	_, err = factory.NewTtMsgStream(ctx)
	assert.NoError(t, err)
}
// TestWpmsFactory covers the woodpecker msgstream factory: NewMsgStream
// returns a usable stream, while NewTtMsgStream and NewMsgStreamDisposer
// return nil values.
func TestWpmsFactory(t *testing.T) {
	ctx := context.Background()
	factory := NewWpmsFactory(&Params.ServiceParam)

	// A regular stream is created successfully.
	stream, err := factory.NewMsgStream(ctx)
	assert.NoError(t, err)
	assert.NotNil(t, stream)

	// Timetick streams come back nil (no error).
	ttStream, err := factory.NewTtMsgStream(ctx)
	assert.NoError(t, err)
	assert.Nil(t, ttStream)

	// No disposer is provided by this factory.
	assert.Nil(t, factory.NewMsgStreamDisposer(ctx))
}
// TestWpMsgStream exercises the no-op WpMsgStream stub on its zero value:
// accessors return nil, mutating methods do not panic, and every fallible
// method reports no error.
func TestWpMsgStream(t *testing.T) {
	wpStream := &WpMsgStream{}

	// Accessors on the zero value return nil.
	assert.Nil(t, wpStream.GetProduceChannels())
	assert.Nil(t, wpStream.Chan())
	assert.Nil(t, wpStream.GetUnmarshalDispatcher())

	ctx := context.Background()

	// No-op methods must execute without panicking.
	wpStream.Close()
	wpStream.AsProducer(ctx, []string{"test-channel"})
	wpStream.SetRepackFunc(nil)

	// Fallible methods return zero values and nil errors. Use
	// assert.NoError for error results (idiomatic testify; clearer failure
	// messages, and it rejects typed-nil errors that assert.Nil accepts).
	msgID, err := wpStream.GetLatestMsgID("test-channel")
	assert.Nil(t, msgID)
	assert.NoError(t, err)

	assert.NoError(t, wpStream.CheckTopicValid("test-channel"))
	assert.NoError(t, wpStream.Produce(ctx, &MsgPack{}))

	broadcastResult, err := wpStream.Broadcast(ctx, &MsgPack{})
	assert.Nil(t, broadcastResult)
	assert.NoError(t, err)

	assert.NoError(t, wpStream.AsConsumer(ctx, []string{"test-channel"}, "test-sub", common.SubscriptionPositionEarliest))
	assert.NoError(t, wpStream.Seek(ctx, []*MsgPosition{}, false))
}