From 3a66e1de6531b6290e93aca96c8b03c7ddb6bba4 Mon Sep 17 00:00:00 2001 From: congqixia Date: Fri, 19 May 2023 15:57:24 +0800 Subject: [PATCH] Use suite for integration test (#24253) Signed-off-by: Congqi Xia --- pkg/config/etcd_source.go | 5 +- pkg/config/manager.go | 2 + pkg/util/etcd/etcd_util.go | 2 +- scripts/run_intergration_test.sh | 2 +- tests/integration/bulkinsert_test.go | 66 +- .../integration/get_index_statistics_test.go | 72 +- tests/integration/get_vector_test.go | 302 +++---- tests/integration/hello_milvus_test.go | 64 +- tests/integration/json_expr_test.go | 847 +++++++++--------- tests/integration/meta_watcher_test.go | 103 +-- tests/integration/minicluster.go | 4 + tests/integration/minicluster_test.go | 179 ++-- tests/integration/range_search_test.go | 111 ++- tests/integration/refresh_config_test.go | 79 +- tests/integration/suite_test.go | 103 +++ tests/integration/upsert_test.go | 58 +- 16 files changed, 1027 insertions(+), 972 deletions(-) create mode 100644 tests/integration/suite_test.go diff --git a/pkg/config/etcd_source.go b/pkg/config/etcd_source.go index 79136b91bf..17ba85565c 100644 --- a/pkg/config/etcd_source.go +++ b/pkg/config/etcd_source.go @@ -19,6 +19,7 @@ package config import ( "context" "fmt" + "path" "strings" "sync" "time" @@ -104,7 +105,7 @@ func (es *EtcdSource) GetSourceName() string { } func (es *EtcdSource) Close() { - es.etcdCli.Close() + // cannot close client here, since client is shared with components es.configRefresher.stop() } @@ -128,7 +129,7 @@ func (es *EtcdSource) UpdateOptions(opts Options) { func (es *EtcdSource) refreshConfigurations() error { es.RLock() - prefix := es.keyPrefix + "/config" + prefix := path.Join(es.keyPrefix, "config") es.RUnlock() ctx, cancel := context.WithTimeout(es.ctx, ReadConfigTimeout) diff --git a/pkg/config/manager.go b/pkg/config/manager.go index f106fe629a..a853abdb3a 100644 --- a/pkg/config/manager.go +++ b/pkg/config/manager.go @@ -162,6 +162,8 @@ func (m *Manager) FileConfigs() map[string]string { } func (m *Manager) Close() { + m.Lock() + defer m.Unlock() for _, s := range m.sources { s.Close() } diff --git a/pkg/util/etcd/etcd_util.go b/pkg/util/etcd/etcd_util.go index a9f065c969..58bf2e2085 100644 --- a/pkg/util/etcd/etcd_util.go +++ b/pkg/util/etcd/etcd_util.go @@ -175,7 +175,7 @@ func buildKvGroup(keys, values []string) (map[string]string, error) { // StartTestEmbedEtcdServer returns a newly created embed etcd server. // ### USED FOR UNIT TEST ONLY ### func StartTestEmbedEtcdServer() (*embed.Etcd, string, error) { - dir, err := ioutil.TempDir(os.TempDir(), "milvus_datanode_ut") + dir, err := ioutil.TempDir(os.TempDir(), "milvus_ut") if err != nil { return nil, "", err } diff --git a/scripts/run_intergration_test.sh b/scripts/run_intergration_test.sh index 8bee3d6141..4600035ef2 100755 --- a/scripts/run_intergration_test.sh +++ b/scripts/run_intergration_test.sh @@ -17,7 +17,7 @@ # limitations under the License. 
# run integration test -echo "Running integration test uner ./tests/integration" +echo "Running integration test under ./tests/integration" BASEDIR=$(dirname "$0") source $BASEDIR/setenv.sh diff --git a/tests/integration/bulkinsert_test.go b/tests/integration/bulkinsert_test.go index b6d0231a10..bb57292d96 100644 --- a/tests/integration/bulkinsert_test.go +++ b/tests/integration/bulkinsert_test.go @@ -27,6 +27,7 @@ import ( "github.com/golang/protobuf/proto" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/stretchr/testify/suite" "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/commonpb" @@ -44,6 +45,10 @@ const ( Dim = 128 ) +type BulkInsertSuite struct { + MiniClusterSuite +} + // test bulk insert E2E // 1, create collection with a vector column and a varchar column // 2, generate numpy files @@ -51,17 +56,10 @@ const ( // 4, create index // 5, load // 6, search -func TestBulkInsert(t *testing.T) { - ctx, cancel := context.WithTimeout(context.Background(), time.Second*180) - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - defer func() { - err = c.Stop() - assert.NoError(t, err) - cancel() - }() +func (s *BulkInsertSuite) TestBulkInsert() { + c := s.Cluster + ctx, cancel := context.WithCancel(c.GetContext()) + defer cancel() prefix := "TestBulkInsert" dbName := "" @@ -75,7 +73,7 @@ func TestBulkInsert(t *testing.T) { &schemapb.FieldSchema{Name: "embeddings", DataType: schemapb.DataType_FloatVector, TypeParams: []*commonpb.KeyValuePair{{Key: common.DimKey, Value: "128"}}}, ) marshaledSchema, err := proto.Marshal(schema) - assert.NoError(t, err) + s.NoError(err) createCollectionStatus, err := c.proxy.CreateCollection(ctx, &milvuspb.CreateCollectionRequest{ DbName: dbName, @@ -83,17 +81,17 @@ func TestBulkInsert(t *testing.T) { Schema: marshaledSchema, ShardsNum: common.DefaultShardsNum, }) - assert.NoError(t, err) + s.NoError(err) if createCollectionStatus.GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("createCollectionStatus fail reason", zap.String("reason", createCollectionStatus.GetReason())) - t.FailNow() + s.FailNow("failed to create collection") } - assert.Equal(t, createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) + s.Equal(createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) log.Info("CreateCollection result", zap.Any("createCollectionStatus", createCollectionStatus)) showCollectionsResp, err := c.proxy.ShowCollections(ctx, &milvuspb.ShowCollectionsRequest{}) - assert.NoError(t, err) - assert.Equal(t, showCollectionsResp.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) + s.NoError(err) + s.Equal(showCollectionsResp.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) log.Info("ShowCollections result", zap.Any("showCollectionsResp", showCollectionsResp)) err = GenerateNumpyFile(c.chunkManager.RootPath()+"/"+"embeddings.npy", 100, schemapb.DataType_FloatVector, []*commonpb.KeyValuePair{ @@ -102,14 +100,14 @@ func TestBulkInsert(t *testing.T) { Value: strconv.Itoa(Dim), }, }) - assert.NoError(t, err) + s.NoError(err) err = GenerateNumpyFile(c.chunkManager.RootPath()+"/"+"image_path.npy", 100, schemapb.DataType_VarChar, []*commonpb.KeyValuePair{ { Key: common.MaxLengthKey, Value: strconv.Itoa(65535), }, }) - assert.NoError(t, err) + s.NoError(err) bulkInsertFiles := []string{ c.chunkManager.RootPath() + "/" + "embeddings.npy", @@ -117,13 +115,13 @@ func TestBulkInsert(t *testing.T) { } health1, err := c.dataCoord.CheckHealth(ctx, 
&milvuspb.CheckHealthRequest{}) - assert.NoError(t, err) + s.NoError(err) log.Info("dataCoord health", zap.Any("health1", health1)) importResp, err := c.proxy.Import(ctx, &milvuspb.ImportRequest{ CollectionName: collectionName, Files: bulkInsertFiles, }) - assert.NoError(t, err) + s.NoError(err) log.Info("Import result", zap.Any("importResp", importResp), zap.Int64s("tasks", importResp.GetTasks())) tasks := importResp.GetTasks() @@ -133,7 +131,7 @@ func TestBulkInsert(t *testing.T) { importTaskState, err := c.proxy.GetImportState(ctx, &milvuspb.GetImportStateRequest{ Task: task, }) - assert.NoError(t, err) + s.NoError(err) switch importTaskState.GetState() { case commonpb.ImportState_ImportCompleted: break loop @@ -150,12 +148,12 @@ func TestBulkInsert(t *testing.T) { } health2, err := c.dataCoord.CheckHealth(ctx, &milvuspb.CheckHealthRequest{}) - assert.NoError(t, err) + s.NoError(err) log.Info("dataCoord health", zap.Any("health2", health2)) segments, err := c.metaWatcher.ShowSegments() - assert.NoError(t, err) - assert.NotEmpty(t, segments) + s.NoError(err) + s.NotEmpty(segments) for _, segment := range segments { log.Info("ShowSegments result", zap.String("segment", segment.String())) } @@ -170,21 +168,21 @@ func TestBulkInsert(t *testing.T) { if createIndexStatus.GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("createIndexStatus fail reason", zap.String("reason", createIndexStatus.GetReason())) } - assert.NoError(t, err) - assert.Equal(t, commonpb.ErrorCode_Success, createIndexStatus.GetErrorCode()) + s.NoError(err) + s.Equal(commonpb.ErrorCode_Success, createIndexStatus.GetErrorCode()) - waitingForIndexBuilt(ctx, c, t, collectionName, "embeddings") + waitingForIndexBuilt(ctx, c, s.T(), collectionName, "embeddings") // load loadStatus, err := c.proxy.LoadCollection(ctx, &milvuspb.LoadCollectionRequest{ DbName: dbName, CollectionName: collectionName, }) - assert.NoError(t, err) + s.NoError(err) if loadStatus.GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("loadStatus fail reason", zap.String("reason", loadStatus.GetReason())) } - assert.Equal(t, commonpb.ErrorCode_Success, loadStatus.GetErrorCode()) + s.Equal(commonpb.ErrorCode_Success, loadStatus.GetErrorCode()) waitingForLoad(ctx, c, collectionName) // search @@ -202,8 +200,8 @@ func TestBulkInsert(t *testing.T) { if searchResult.GetStatus().GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("searchResult fail reason", zap.String("reason", searchResult.GetStatus().GetReason())) } - assert.NoError(t, err) - assert.Equal(t, commonpb.ErrorCode_Success, searchResult.GetStatus().GetErrorCode()) + s.NoError(err) + s.Equal(commonpb.ErrorCode_Success, searchResult.GetStatus().GetErrorCode()) log.Info("======================") log.Info("======================") @@ -212,6 +210,10 @@ func TestBulkInsert(t *testing.T) { log.Info("======================") } +func TestBulkInsert(t *testing.T) { + suite.Run(t, new(BulkInsertSuite)) +} + func GenerateNumpyFile(filePath string, rowCount int, dType schemapb.DataType, typeParams []*commonpb.KeyValuePair) error { if dType == schemapb.DataType_VarChar { var data []string diff --git a/tests/integration/get_index_statistics_test.go b/tests/integration/get_index_statistics_test.go index 562661028f..5085788b65 100644 --- a/tests/integration/get_index_statistics_test.go +++ b/tests/integration/get_index_statistics_test.go @@ -2,10 +2,8 @@ package integration import ( "context" - "testing" "github.com/golang/protobuf/proto" - "github.com/stretchr/testify/assert" "go.uber.org/zap" 
"github.com/milvus-io/milvus-proto/go-api/commonpb" @@ -16,14 +14,14 @@ import ( "github.com/milvus-io/milvus/pkg/util/funcutil" ) -func TestGetIndexStatistics(t *testing.T) { - ctx := context.Background() - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - defer c.Stop() - assert.NoError(t, err) +type GetIndexStatisticsSuite struct { + MiniClusterSuite +} + +func (s *GetIndexStatisticsSuite) TestGetIndexStatistics() { + c := s.Cluster + ctx, cancel := context.WithCancel(c.GetContext()) + defer cancel() prefix := "TestGetIndexStatistics" dbName := "" @@ -33,7 +31,7 @@ func TestGetIndexStatistics(t *testing.T) { schema := constructSchema(collectionName, dim, true) marshaledSchema, err := proto.Marshal(schema) - assert.NoError(t, err) + s.NoError(err) createCollectionStatus, err := c.proxy.CreateCollection(ctx, &milvuspb.CreateCollectionRequest{ DbName: dbName, @@ -41,11 +39,11 @@ func TestGetIndexStatistics(t *testing.T) { Schema: marshaledSchema, ShardsNum: 2, }) - assert.NoError(t, err) + s.NoError(err) if createCollectionStatus.GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("createCollectionStatus fail reason", zap.String("reason", createCollectionStatus.GetReason())) } - assert.Equal(t, createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) + s.Equal(createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) fVecColumn := newFloatVectorFieldData(floatVecField, rowNum, dim) hashKeys := generateHashKeys(rowNum) @@ -56,19 +54,19 @@ func TestGetIndexStatistics(t *testing.T) { HashKeys: hashKeys, NumRows: uint32(rowNum), }) - assert.NoError(t, err) - assert.Equal(t, insertResult.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) + s.NoError(err) + s.Equal(insertResult.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) // flush flushResp, err := c.proxy.Flush(ctx, &milvuspb.FlushRequest{ DbName: dbName, CollectionNames: []string{collectionName}, }) - assert.NoError(t, err) + s.NoError(err) segmentIDs, has := flushResp.GetCollSegIDs()[collectionName] ids := segmentIDs.GetData() - assert.NotEmpty(t, segmentIDs) - assert.Equal(t, true, has) + s.NotEmpty(segmentIDs) + s.Equal(true, has) waitingForFlush(ctx, c, ids) // create index @@ -82,20 +80,20 @@ func TestGetIndexStatistics(t *testing.T) { if createIndexStatus.GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("createIndexStatus fail reason", zap.String("reason", createIndexStatus.GetReason())) } - assert.NoError(t, err) - assert.Equal(t, commonpb.ErrorCode_Success, createIndexStatus.GetErrorCode()) + s.NoError(err) + s.Equal(commonpb.ErrorCode_Success, createIndexStatus.GetErrorCode()) - waitingForIndexBuilt(ctx, c, t, collectionName, floatVecField) + waitingForIndexBuilt(ctx, c, s.T(), collectionName, floatVecField) getIndexStatisticsResponse, err := c.proxy.GetIndexStatistics(ctx, &milvuspb.GetIndexStatisticsRequest{ CollectionName: collectionName, IndexName: indexName, }) - assert.NoError(t, err) + s.NoError(err) indexInfos := getIndexStatisticsResponse.GetIndexDescriptions() - assert.Equal(t, 1, len(indexInfos)) - assert.Equal(t, int64(3000), indexInfos[0].IndexedRows) - assert.Equal(t, int64(3000), indexInfos[0].TotalRows) + s.Equal(1, len(indexInfos)) + s.Equal(int64(3000), indexInfos[0].IndexedRows) + s.Equal(int64(3000), indexInfos[0].TotalRows) // skip second insert case for now // the result is not certain @@ -107,32 +105,32 @@ func TestGetIndexStatistics(t *testing.T) { HashKeys: hashKeys, NumRows: uint32(rowNum), }) - assert.NoError(t, 
err) - assert.Equal(t, insertResult2.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) + s.NoError(err) + s.Equal(insertResult2.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) _, err = c.proxy.Flush(ctx, &milvuspb.FlushRequest{ DbName: dbName, CollectionNames: []string{collectionName}, }) - assert.NoError(t, err) + s.NoError(err) segmentIDs2, has2 := flushResp.GetCollSegIDs()[collectionName] ids2 := segmentIDs2.GetData() - assert.NotEmpty(t, segmentIDs) - assert.Equal(t, true, has2) + s.NotEmpty(segmentIDs) + s.Equal(true, has2) waitingForFlush(ctx, c, ids2) loadStatus, err := c.proxy.LoadCollection(ctx, &milvuspb.LoadCollectionRequest{ DbName: dbName, CollectionName: collectionName, }) - assert.NoError(t, err) + s.NoError(err) if loadStatus.GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("loadStatus fail reason", zap.String("reason", loadStatus.GetReason())) } - assert.Equal(t, commonpb.ErrorCode_Success, loadStatus.GetErrorCode()) + s.Equal(commonpb.ErrorCode_Success, loadStatus.GetErrorCode()) waitingForLoad(ctx, c, collectionName) - assert.NoError(t, err) + s.NoError(err) waitingForIndexBuilt(ctx, c, t, collectionName, floatVecField) @@ -140,11 +138,11 @@ func TestGetIndexStatistics(t *testing.T) { CollectionName: collectionName, IndexName: indexName, }) - assert.NoError(t, err) + s.NoError(err) indexInfos2 := getIndexStatisticsResponse2.GetIndexDescriptions() - assert.Equal(t, 1, len(indexInfos2)) - assert.Equal(t, int64(6000), indexInfos2[0].IndexedRows) - assert.Equal(t, int64(6000), indexInfos2[0].TotalRows) + s.Equal(1, len(indexInfos2)) + s.Equal(int64(6000), indexInfos2[0].IndexedRows) + s.Equal(int64(6000), indexInfos2[0].TotalRows) */ log.Info("TestGetIndexStatistics succeed") diff --git a/tests/integration/get_vector_test.go b/tests/integration/get_vector_test.go index d8c0ffe3bf..e881991d98 100644 --- a/tests/integration/get_vector_test.go +++ b/tests/integration/get_vector_test.go @@ -21,7 +21,6 @@ import ( "fmt" "strconv" "testing" - "time" "github.com/golang/protobuf/proto" "github.com/stretchr/testify/suite" @@ -36,11 +35,7 @@ import ( ) type TestGetVectorSuite struct { - suite.Suite - - ctx context.Context - cancel context.CancelFunc - cluster *MiniCluster + MiniClusterSuite // test params nq int @@ -51,25 +46,12 @@ type TestGetVectorSuite struct { vecType schemapb.DataType } -func (suite *TestGetVectorSuite) SetupTest() { - suite.ctx, suite.cancel = context.WithTimeout(context.Background(), time.Second*180) +func (s *TestGetVectorSuite) run() { + ctx, cancel := context.WithCancel(s.Cluster.ctx) + defer cancel() - var err error - suite.cluster, err = StartMiniCluster(suite.ctx) - suite.Require().NoError(err) - err = suite.cluster.Start() - suite.Require().NoError(err) -} - -func (suite *TestGetVectorSuite) TearDownTest() { - err := suite.cluster.Stop() - suite.Require().NoError(err) - suite.cancel() -} - -func (suite *TestGetVectorSuite) run() { collection := fmt.Sprintf("TestGetVector_%d_%d_%s_%s_%s", - suite.nq, suite.topK, suite.indexType, suite.metricType, funcutil.GenRandomStr()) + s.nq, s.topK, s.indexType, s.metricType, funcutil.GenRandomStr()) const ( NB = 10000 @@ -83,7 +65,7 @@ func (suite *TestGetVectorSuite) run() { Name: pkFieldName, IsPrimaryKey: true, Description: "", - DataType: suite.pkType, + DataType: s.pkType, TypeParams: []*commonpb.KeyValuePair{ { Key: common.MaxLengthKey, @@ -98,7 +80,7 @@ func (suite *TestGetVectorSuite) run() { Name: vecFieldName, IsPrimaryKey: false, Description: "", - DataType: suite.vecType, + DataType: 
s.vecType, TypeParams: []*commonpb.KeyValuePair{ { Key: common.DimKey, @@ -109,96 +91,96 @@ func (suite *TestGetVectorSuite) run() { } schema := constructSchema(collection, dim, false, pk, fVec) marshaledSchema, err := proto.Marshal(schema) - suite.Require().NoError(err) + s.Require().NoError(err) - createCollectionStatus, err := suite.cluster.proxy.CreateCollection(suite.ctx, &milvuspb.CreateCollectionRequest{ + createCollectionStatus, err := s.Cluster.proxy.CreateCollection(ctx, &milvuspb.CreateCollectionRequest{ CollectionName: collection, Schema: marshaledSchema, ShardsNum: 2, }) - suite.Require().NoError(err) - suite.Require().Equal(createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) + s.Require().NoError(err) + s.Require().Equal(createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) fieldsData := make([]*schemapb.FieldData, 0) - if suite.pkType == schemapb.DataType_Int64 { + if s.pkType == schemapb.DataType_Int64 { fieldsData = append(fieldsData, newInt64FieldData(pkFieldName, NB)) } else { fieldsData = append(fieldsData, newStringFieldData(pkFieldName, NB)) } var vecFieldData *schemapb.FieldData - if suite.vecType == schemapb.DataType_FloatVector { + if s.vecType == schemapb.DataType_FloatVector { vecFieldData = newFloatVectorFieldData(vecFieldName, NB, dim) } else { vecFieldData = newBinaryVectorFieldData(vecFieldName, NB, dim) } fieldsData = append(fieldsData, vecFieldData) hashKeys := generateHashKeys(NB) - _, err = suite.cluster.proxy.Insert(suite.ctx, &milvuspb.InsertRequest{ + _, err = s.Cluster.proxy.Insert(ctx, &milvuspb.InsertRequest{ CollectionName: collection, FieldsData: fieldsData, HashKeys: hashKeys, NumRows: uint32(NB), }) - suite.Require().NoError(err) - suite.Require().Equal(createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) + s.Require().NoError(err) + s.Require().Equal(createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) // flush - flushResp, err := suite.cluster.proxy.Flush(suite.ctx, &milvuspb.FlushRequest{ + flushResp, err := s.Cluster.proxy.Flush(ctx, &milvuspb.FlushRequest{ CollectionNames: []string{collection}, }) - suite.Require().NoError(err) + s.Require().NoError(err) segmentIDs, has := flushResp.GetCollSegIDs()[collection] ids := segmentIDs.GetData() - suite.Require().NotEmpty(segmentIDs) - suite.Require().True(has) + s.Require().NotEmpty(segmentIDs) + s.Require().True(has) - segments, err := suite.cluster.metaWatcher.ShowSegments() - suite.Require().NoError(err) - suite.Require().NotEmpty(segments) + segments, err := s.Cluster.metaWatcher.ShowSegments() + s.Require().NoError(err) + s.Require().NotEmpty(segments) - waitingForFlush(suite.ctx, suite.cluster, ids) + waitingForFlush(ctx, s.Cluster, ids) // create index - _, err = suite.cluster.proxy.CreateIndex(suite.ctx, &milvuspb.CreateIndexRequest{ + _, err = s.Cluster.proxy.CreateIndex(ctx, &milvuspb.CreateIndexRequest{ CollectionName: collection, FieldName: vecFieldName, IndexName: "_default", - ExtraParams: constructIndexParam(dim, suite.indexType, suite.metricType), + ExtraParams: constructIndexParam(dim, s.indexType, s.metricType), }) - suite.Require().NoError(err) - suite.Require().Equal(createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) + s.Require().NoError(err) + s.Require().Equal(createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) - waitingForIndexBuilt(suite.ctx, suite.cluster, suite.T(), collection, vecFieldName) + waitingForIndexBuilt(ctx, s.Cluster, s.T(), collection, vecFieldName) // load - _, err = 
suite.cluster.proxy.LoadCollection(suite.ctx, &milvuspb.LoadCollectionRequest{ + _, err = s.Cluster.proxy.LoadCollection(ctx, &milvuspb.LoadCollectionRequest{ CollectionName: collection, }) - suite.Require().NoError(err) - suite.Require().Equal(createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) - waitingForLoad(suite.ctx, suite.cluster, collection) + s.Require().NoError(err) + s.Require().Equal(createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) + waitingForLoad(ctx, s.Cluster, collection) // search - nq := suite.nq - topk := suite.topK + nq := s.nq + topk := s.topK outputFields := []string{vecFieldName} - params := getSearchParams(suite.indexType, suite.metricType) + params := getSearchParams(s.indexType, s.metricType) searchReq := constructSearchRequest("", collection, "", - vecFieldName, suite.vecType, outputFields, suite.metricType, params, nq, dim, topk, -1) + vecFieldName, s.vecType, outputFields, s.metricType, params, nq, dim, topk, -1) - searchResp, err := suite.cluster.proxy.Search(suite.ctx, searchReq) - suite.Require().NoError(err) - suite.Require().Equal(searchResp.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) + searchResp, err := s.Cluster.proxy.Search(ctx, searchReq) + s.Require().NoError(err) + s.Require().Equal(searchResp.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) result := searchResp.GetResults() - if suite.pkType == schemapb.DataType_Int64 { - suite.Require().Len(result.GetIds().GetIntId().GetData(), nq*topk) + if s.pkType == schemapb.DataType_Int64 { + s.Require().Len(result.GetIds().GetIntId().GetData(), nq*topk) } else { - suite.Require().Len(result.GetIds().GetStrId().GetData(), nq*topk) + s.Require().Len(result.GetIds().GetStrId().GetData(), nq*topk) } - suite.Require().Len(result.GetScores(), nq*topk) - suite.Require().GreaterOrEqual(len(result.GetFieldsData()), 1) + s.Require().Len(result.GetScores(), nq*topk) + s.Require().GreaterOrEqual(len(result.GetFieldsData()), 1) var vecFieldIndex = -1 for i, fieldData := range result.GetFieldsData() { if typeutil.IsVectorType(fieldData.GetType()) { @@ -206,162 +188,162 @@ func (suite *TestGetVectorSuite) run() { break } } - suite.Require().EqualValues(nq, result.GetNumQueries()) - suite.Require().EqualValues(topk, result.GetTopK()) + s.Require().EqualValues(nq, result.GetNumQueries()) + s.Require().EqualValues(topk, result.GetTopK()) // check output vectors - if suite.vecType == schemapb.DataType_FloatVector { - suite.Require().Len(result.GetFieldsData()[vecFieldIndex].GetVectors().GetFloatVector().GetData(), nq*topk*dim) + if s.vecType == schemapb.DataType_FloatVector { + s.Require().Len(result.GetFieldsData()[vecFieldIndex].GetVectors().GetFloatVector().GetData(), nq*topk*dim) rawData := vecFieldData.GetVectors().GetFloatVector().GetData() resData := result.GetFieldsData()[vecFieldIndex].GetVectors().GetFloatVector().GetData() - if suite.pkType == schemapb.DataType_Int64 { + if s.pkType == schemapb.DataType_Int64 { for i, id := range result.GetIds().GetIntId().GetData() { expect := rawData[int(id)*dim : (int(id)+1)*dim] actual := resData[i*dim : (i+1)*dim] - suite.Require().ElementsMatch(expect, actual) + s.Require().ElementsMatch(expect, actual) } } else { for i, idStr := range result.GetIds().GetStrId().GetData() { id, err := strconv.Atoi(idStr) - suite.Require().NoError(err) + s.Require().NoError(err) expect := rawData[id*dim : (id+1)*dim] actual := resData[i*dim : (i+1)*dim] - suite.Require().ElementsMatch(expect, actual) + s.Require().ElementsMatch(expect, actual) 
} } } else { - suite.Require().Len(result.GetFieldsData()[vecFieldIndex].GetVectors().GetBinaryVector(), nq*topk*dim/8) + s.Require().Len(result.GetFieldsData()[vecFieldIndex].GetVectors().GetBinaryVector(), nq*topk*dim/8) rawData := vecFieldData.GetVectors().GetBinaryVector() resData := result.GetFieldsData()[vecFieldIndex].GetVectors().GetBinaryVector() - if suite.pkType == schemapb.DataType_Int64 { + if s.pkType == schemapb.DataType_Int64 { for i, id := range result.GetIds().GetIntId().GetData() { dataBytes := dim / 8 for j := 0; j < dataBytes; j++ { expect := rawData[int(id)*dataBytes+j] actual := resData[i*dataBytes+j] - suite.Require().Equal(expect, actual) + s.Require().Equal(expect, actual) } } } else { for i, idStr := range result.GetIds().GetStrId().GetData() { dataBytes := dim / 8 id, err := strconv.Atoi(idStr) - suite.Require().NoError(err) + s.Require().NoError(err) for j := 0; j < dataBytes; j++ { expect := rawData[id*dataBytes+j] actual := resData[i*dataBytes+j] - suite.Require().Equal(expect, actual) + s.Require().Equal(expect, actual) } } } } - status, err := suite.cluster.proxy.DropCollection(suite.ctx, &milvuspb.DropCollectionRequest{ + status, err := s.Cluster.proxy.DropCollection(ctx, &milvuspb.DropCollectionRequest{ CollectionName: collection, }) - suite.Require().NoError(err) - suite.Require().Equal(status.GetErrorCode(), commonpb.ErrorCode_Success) + s.Require().NoError(err) + s.Require().Equal(status.GetErrorCode(), commonpb.ErrorCode_Success) } -func (suite *TestGetVectorSuite) TestGetVector_FLAT() { - suite.nq = 10 - suite.topK = 10 - suite.indexType = IndexFaissIDMap - suite.metricType = distance.L2 - suite.pkType = schemapb.DataType_Int64 - suite.vecType = schemapb.DataType_FloatVector - suite.run() +func (s *TestGetVectorSuite) TestGetVector_FLAT() { + s.nq = 10 + s.topK = 10 + s.indexType = IndexFaissIDMap + s.metricType = distance.L2 + s.pkType = schemapb.DataType_Int64 + s.vecType = schemapb.DataType_FloatVector + s.run() } -func (suite *TestGetVectorSuite) TestGetVector_IVF_FLAT() { - suite.nq = 10 - suite.topK = 10 - suite.indexType = IndexFaissIvfFlat - suite.metricType = distance.L2 - suite.pkType = schemapb.DataType_Int64 - suite.vecType = schemapb.DataType_FloatVector - suite.run() +func (s *TestGetVectorSuite) TestGetVector_IVF_FLAT() { + s.nq = 10 + s.topK = 10 + s.indexType = IndexFaissIvfFlat + s.metricType = distance.L2 + s.pkType = schemapb.DataType_Int64 + s.vecType = schemapb.DataType_FloatVector + s.run() } -func (suite *TestGetVectorSuite) TestGetVector_IVF_PQ() { - suite.nq = 10 - suite.topK = 10 - suite.indexType = IndexFaissIvfPQ - suite.metricType = distance.L2 - suite.pkType = schemapb.DataType_Int64 - suite.vecType = schemapb.DataType_FloatVector - suite.run() +func (s *TestGetVectorSuite) TestGetVector_IVF_PQ() { + s.nq = 10 + s.topK = 10 + s.indexType = IndexFaissIvfPQ + s.metricType = distance.L2 + s.pkType = schemapb.DataType_Int64 + s.vecType = schemapb.DataType_FloatVector + s.run() } -func (suite *TestGetVectorSuite) TestGetVector_IVF_SQ8() { - suite.nq = 10 - suite.topK = 10 - suite.indexType = IndexFaissIvfSQ8 - suite.metricType = distance.L2 - suite.pkType = schemapb.DataType_Int64 - suite.vecType = schemapb.DataType_FloatVector - suite.run() +func (s *TestGetVectorSuite) TestGetVector_IVF_SQ8() { + s.nq = 10 + s.topK = 10 + s.indexType = IndexFaissIvfSQ8 + s.metricType = distance.L2 + s.pkType = schemapb.DataType_Int64 + s.vecType = schemapb.DataType_FloatVector + s.run() } -func (suite *TestGetVectorSuite) 
TestGetVector_HNSW() { - suite.nq = 10 - suite.topK = 10 - suite.indexType = IndexHNSW - suite.metricType = distance.L2 - suite.pkType = schemapb.DataType_Int64 - suite.vecType = schemapb.DataType_FloatVector - suite.run() +func (s *TestGetVectorSuite) TestGetVector_HNSW() { + s.nq = 10 + s.topK = 10 + s.indexType = IndexHNSW + s.metricType = distance.L2 + s.pkType = schemapb.DataType_Int64 + s.vecType = schemapb.DataType_FloatVector + s.run() } -func (suite *TestGetVectorSuite) TestGetVector_IP() { - suite.nq = 10 - suite.topK = 10 - suite.indexType = IndexHNSW - suite.metricType = distance.IP - suite.pkType = schemapb.DataType_Int64 - suite.vecType = schemapb.DataType_FloatVector - suite.run() +func (s *TestGetVectorSuite) TestGetVector_IP() { + s.nq = 10 + s.topK = 10 + s.indexType = IndexHNSW + s.metricType = distance.IP + s.pkType = schemapb.DataType_Int64 + s.vecType = schemapb.DataType_FloatVector + s.run() } -func (suite *TestGetVectorSuite) TestGetVector_StringPK() { - suite.nq = 10 - suite.topK = 10 - suite.indexType = IndexHNSW - suite.metricType = distance.L2 - suite.pkType = schemapb.DataType_VarChar - suite.vecType = schemapb.DataType_FloatVector - suite.run() +func (s *TestGetVectorSuite) TestGetVector_StringPK() { + s.nq = 10 + s.topK = 10 + s.indexType = IndexHNSW + s.metricType = distance.L2 + s.pkType = schemapb.DataType_VarChar + s.vecType = schemapb.DataType_FloatVector + s.run() } -func (suite *TestGetVectorSuite) TestGetVector_BinaryVector() { - suite.nq = 10 - suite.topK = 10 - suite.indexType = IndexFaissBinIvfFlat - suite.metricType = distance.JACCARD - suite.pkType = schemapb.DataType_Int64 - suite.vecType = schemapb.DataType_BinaryVector - suite.run() +func (s *TestGetVectorSuite) TestGetVector_BinaryVector() { + s.nq = 10 + s.topK = 10 + s.indexType = IndexFaissBinIvfFlat + s.metricType = distance.JACCARD + s.pkType = schemapb.DataType_Int64 + s.vecType = schemapb.DataType_BinaryVector + s.run() } -func (suite *TestGetVectorSuite) TestGetVector_Big_NQ_TOPK() { - suite.T().Skip("skip big NQ Top due to timeout") - suite.nq = 10000 - suite.topK = 200 - suite.indexType = IndexHNSW - suite.metricType = distance.L2 - suite.pkType = schemapb.DataType_Int64 - suite.vecType = schemapb.DataType_FloatVector - suite.run() +func (s *TestGetVectorSuite) TestGetVector_Big_NQ_TOPK() { + s.T().Skip("skip big NQ Top due to timeout") + s.nq = 10000 + s.topK = 200 + s.indexType = IndexHNSW + s.metricType = distance.L2 + s.pkType = schemapb.DataType_Int64 + s.vecType = schemapb.DataType_FloatVector + s.run() } -//func (suite *TestGetVectorSuite) TestGetVector_DISKANN() { -// suite.nq = 10 -// suite.topK = 10 -// suite.indexType = IndexDISKANN -// suite.metricType = distance.L2 -// suite.pkType = schemapb.DataType_Int64 -// suite.vecType = schemapb.DataType_FloatVector -// suite.run() +//func (s *TestGetVectorSuite) TestGetVector_DISKANN() { +// s.nq = 10 +// s.topK = 10 +// s.indexType = IndexDISKANN +// s.metricType = distance.L2 +// s.pkType = schemapb.DataType_Int64 +// s.vecType = schemapb.DataType_FloatVector +// s.run() //} func TestGetVector(t *testing.T) { diff --git a/tests/integration/hello_milvus_test.go b/tests/integration/hello_milvus_test.go index a2c1c0e982..069332ce64 100644 --- a/tests/integration/hello_milvus_test.go +++ b/tests/integration/hello_milvus_test.go @@ -20,10 +20,9 @@ import ( "context" "fmt" "testing" - "time" "github.com/golang/protobuf/proto" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" "go.uber.org/zap" 
"github.com/milvus-io/milvus-proto/go-api/commonpb" @@ -35,18 +34,14 @@ import ( "github.com/milvus-io/milvus/pkg/util/funcutil" ) -func TestHelloMilvus(t *testing.T) { - ctx, cancel := context.WithTimeout(context.Background(), time.Second*180) +type HelloMilvusSuite struct { + MiniClusterSuite +} + +func (s *HelloMilvusSuite) TestHelloMilvus() { + ctx, cancel := context.WithCancel(s.Cluster.ctx) defer cancel() - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - defer func() { - err = c.Stop() - assert.NoError(t, err) - cancel() - }() + c := s.Cluster const ( dim = 128 @@ -58,7 +53,7 @@ func TestHelloMilvus(t *testing.T) { schema := constructSchema(collectionName, dim, true) marshaledSchema, err := proto.Marshal(schema) - assert.NoError(t, err) + s.NoError(err) createCollectionStatus, err := c.proxy.CreateCollection(ctx, &milvuspb.CreateCollectionRequest{ DbName: dbName, @@ -66,16 +61,16 @@ func TestHelloMilvus(t *testing.T) { Schema: marshaledSchema, ShardsNum: common.DefaultShardsNum, }) - assert.NoError(t, err) + s.NoError(err) if createCollectionStatus.GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("createCollectionStatus fail reason", zap.String("reason", createCollectionStatus.GetReason())) } - assert.Equal(t, createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) + s.Equal(createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) log.Info("CreateCollection result", zap.Any("createCollectionStatus", createCollectionStatus)) showCollectionsResp, err := c.proxy.ShowCollections(ctx, &milvuspb.ShowCollectionsRequest{}) - assert.NoError(t, err) - assert.Equal(t, showCollectionsResp.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) + s.NoError(err) + s.Equal(showCollectionsResp.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) log.Info("ShowCollections result", zap.Any("showCollectionsResp", showCollectionsResp)) fVecColumn := newFloatVectorFieldData(floatVecField, rowNum, dim) @@ -87,23 +82,23 @@ func TestHelloMilvus(t *testing.T) { HashKeys: hashKeys, NumRows: uint32(rowNum), }) - assert.NoError(t, err) - assert.Equal(t, insertResult.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) + s.NoError(err) + s.Equal(insertResult.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) // flush flushResp, err := c.proxy.Flush(ctx, &milvuspb.FlushRequest{ DbName: dbName, CollectionNames: []string{collectionName}, }) - assert.NoError(t, err) + s.NoError(err) segmentIDs, has := flushResp.GetCollSegIDs()[collectionName] ids := segmentIDs.GetData() - assert.NotEmpty(t, segmentIDs) - assert.True(t, has) + s.NotEmpty(segmentIDs) + s.True(has) segments, err := c.metaWatcher.ShowSegments() - assert.NoError(t, err) - assert.NotEmpty(t, segments) + s.NoError(err) + s.NotEmpty(segments) for _, segment := range segments { log.Info("ShowSegments result", zap.String("segment", segment.String())) } @@ -119,21 +114,21 @@ func TestHelloMilvus(t *testing.T) { if createIndexStatus.GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("createIndexStatus fail reason", zap.String("reason", createIndexStatus.GetReason())) } - assert.NoError(t, err) - assert.Equal(t, commonpb.ErrorCode_Success, createIndexStatus.GetErrorCode()) + s.NoError(err) + s.Equal(commonpb.ErrorCode_Success, createIndexStatus.GetErrorCode()) - waitingForIndexBuilt(ctx, c, t, collectionName, floatVecField) + waitingForIndexBuilt(ctx, c, s.T(), collectionName, floatVecField) // load loadStatus, err := c.proxy.LoadCollection(ctx, 
&milvuspb.LoadCollectionRequest{ DbName: dbName, CollectionName: collectionName, }) - assert.NoError(t, err) + s.NoError(err) if loadStatus.GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("loadStatus fail reason", zap.String("reason", loadStatus.GetReason())) } - assert.Equal(t, commonpb.ErrorCode_Success, loadStatus.GetErrorCode()) + s.Equal(commonpb.ErrorCode_Success, loadStatus.GetErrorCode()) waitingForLoad(ctx, c, collectionName) // search @@ -151,8 +146,13 @@ func TestHelloMilvus(t *testing.T) { if searchResult.GetStatus().GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("searchResult fail reason", zap.String("reason", searchResult.GetStatus().GetReason())) } - assert.NoError(t, err) - assert.Equal(t, commonpb.ErrorCode_Success, searchResult.GetStatus().GetErrorCode()) + s.NoError(err) + s.Equal(commonpb.ErrorCode_Success, searchResult.GetStatus().GetErrorCode()) log.Info("TestHelloMilvus succeed") + +} + +func TestHelloMilvus(t *testing.T) { + suite.Run(t, new(HelloMilvusSuite)) } diff --git a/tests/integration/json_expr_test.go b/tests/integration/json_expr_test.go index 2eeef4df4a..fd22a1fee0 100644 --- a/tests/integration/json_expr_test.go +++ b/tests/integration/json_expr_test.go @@ -27,6 +27,7 @@ import ( "github.com/cockroachdb/errors" "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/util/distance" + "github.com/stretchr/testify/suite" "github.com/golang/protobuf/proto" "github.com/milvus-io/milvus-proto/go-api/commonpb" @@ -34,18 +35,98 @@ import ( "github.com/milvus-io/milvus-proto/go-api/schemapb" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/funcutil" - "github.com/stretchr/testify/assert" "go.uber.org/zap" ) -func TestJson_EnableDynamicSchema(t *testing.T) { - ctx := context.Background() - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - defer c.Stop() - assert.NoError(t, err) +type JSONExprSuite struct { + MiniClusterSuite +} + +func (s *JSONExprSuite) TestJsonEnableDynamicSchema() { + c := s.Cluster + ctx, cancel := context.WithCancel(c.GetContext()) + defer cancel() + prefix := "TestHelloMilvus" + dbName := "" + collectionName := prefix + funcutil.GenRandomStr() + dim := 128 + rowNum := 100 + + constructCollectionSchema := func() *schemapb.CollectionSchema { + pk := &schemapb.FieldSchema{ + FieldID: 0, + Name: int64Field, + IsPrimaryKey: true, + Description: "", + DataType: schemapb.DataType_Int64, + TypeParams: nil, + IndexParams: nil, + AutoID: true, + } + fVec := &schemapb.FieldSchema{ + FieldID: 0, + Name: floatVecField, + IsPrimaryKey: false, + Description: "", + DataType: schemapb.DataType_FloatVector, + TypeParams: []*commonpb.KeyValuePair{ + { + Key: common.DimKey, + Value: strconv.Itoa(dim), + }, + }, + IndexParams: nil, + AutoID: false, + } + return &schemapb.CollectionSchema{ + Name: collectionName, + Description: "", + AutoID: false, + EnableDynamicField: true, + Fields: []*schemapb.FieldSchema{ + pk, + fVec, + }, + } + } + schema := constructCollectionSchema() + marshaledSchema, err := proto.Marshal(schema) + s.NoError(err) + + createCollectionStatus, err := c.proxy.CreateCollection(ctx, &milvuspb.CreateCollectionRequest{ + DbName: dbName, + CollectionName: collectionName, + Schema: marshaledSchema, + ShardsNum: 2, + }) + s.NoError(err) + if createCollectionStatus.GetErrorCode() != commonpb.ErrorCode_Success { + log.Warn("createCollectionStatus fail reason", zap.String("reason", createCollectionStatus.GetReason())) + } + 
s.Equal(createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) + + log.Info("CreateCollection result", zap.Any("createCollectionStatus", createCollectionStatus)) + showCollectionsResp, err := c.proxy.ShowCollections(ctx, &milvuspb.ShowCollectionsRequest{}) + s.NoError(err) + s.Equal(showCollectionsResp.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) + log.Info("ShowCollections result", zap.Any("showCollectionsResp", showCollectionsResp)) + + describeCollectionResp, err := c.proxy.DescribeCollection(ctx, &milvuspb.DescribeCollectionRequest{CollectionName: collectionName}) + s.NoError(err) + s.True(describeCollectionResp.Schema.EnableDynamicField) + s.Equal(3, len(describeCollectionResp.GetSchema().GetFields())) + + fVecColumn := newFloatVectorFieldData(floatVecField, rowNum, dim) + jsonData := newJSONData(common.MetaFieldName, rowNum) + s.insertFlushIndexLoad(ctx, c, dbName, collectionName, rowNum, dim, []*schemapb.FieldData{fVecColumn, jsonData}) + + s.checkSearch(c, collectionName, common.MetaFieldName, dim) +} + +func (s *JSONExprSuite) TestJSON_InsertWithoutDynamicData() { + c := s.Cluster + ctx, cancel := context.WithCancel(c.GetContext()) + defer cancel() prefix := "TestHelloMilvus" dbName := "" @@ -92,7 +173,7 @@ func TestJson_EnableDynamicSchema(t *testing.T) { } schema := constructCollectionSchema() marshaledSchema, err := proto.Marshal(schema) - assert.NoError(t, err) + s.NoError(err) createCollectionStatus, err := c.proxy.CreateCollection(ctx, &milvuspb.CreateCollectionRequest{ DbName: dbName, @@ -100,330 +181,490 @@ func TestJson_EnableDynamicSchema(t *testing.T) { Schema: marshaledSchema, ShardsNum: 2, }) - assert.NoError(t, err) + s.NoError(err) if createCollectionStatus.GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("createCollectionStatus fail reason", zap.String("reason", createCollectionStatus.GetReason())) } - assert.Equal(t, createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) + s.Equal(createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) log.Info("CreateCollection result", zap.Any("createCollectionStatus", createCollectionStatus)) showCollectionsResp, err := c.proxy.ShowCollections(ctx, &milvuspb.ShowCollectionsRequest{}) - assert.NoError(t, err) - assert.Equal(t, showCollectionsResp.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) + s.NoError(err) + s.Equal(showCollectionsResp.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) log.Info("ShowCollections result", zap.Any("showCollectionsResp", showCollectionsResp)) describeCollectionResp, err := c.proxy.DescribeCollection(ctx, &milvuspb.DescribeCollectionRequest{CollectionName: collectionName}) - assert.NoError(t, err) - assert.True(t, describeCollectionResp.Schema.EnableDynamicField) - assert.Equal(t, 3, len(describeCollectionResp.GetSchema().GetFields())) + s.NoError(err) + s.True(describeCollectionResp.Schema.EnableDynamicField) + s.Equal(3, len(describeCollectionResp.GetSchema().GetFields())) fVecColumn := newFloatVectorFieldData(floatVecField, rowNum, dim) - jsonData := newJSONData(common.MetaFieldName, rowNum) - insertFlushIndexLoad(ctx, t, c, dbName, collectionName, rowNum, dim, []*schemapb.FieldData{fVecColumn, jsonData}) + s.insertFlushIndexLoad(ctx, c, dbName, collectionName, rowNum, dim, []*schemapb.FieldData{fVecColumn}) - checkSearch(t, c, collectionName, common.MetaFieldName, dim) -} - -func checkSearch(t *testing.T, c *MiniCluster, collectionName, fieldName string, dim int) { expr := "" // search expr = `$meta["A"] > 90` checkFunc := 
func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 5, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(0, len(result.Results.FieldsData)) } - doSearch(c, collectionName, []string{"A"}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{common.MetaFieldName}, expr, dim, checkFunc) + log.Info("GT expression run successfully") +} + +func (s *JSONExprSuite) TestJSON_DynamicSchemaWithJSON() { + c := s.Cluster + ctx, cancel := context.WithCancel(c.GetContext()) + defer cancel() + + prefix := "TestHelloMilvus" + dbName := "" + collectionName := prefix + funcutil.GenRandomStr() + dim := 128 + rowNum := 100 + + constructCollectionSchema := func() *schemapb.CollectionSchema { + pk := &schemapb.FieldSchema{ + FieldID: 0, + Name: int64Field, + IsPrimaryKey: true, + Description: "", + DataType: schemapb.DataType_Int64, + TypeParams: nil, + IndexParams: nil, + AutoID: true, + } + fVec := &schemapb.FieldSchema{ + FieldID: 0, + Name: floatVecField, + IsPrimaryKey: false, + Description: "", + DataType: schemapb.DataType_FloatVector, + TypeParams: []*commonpb.KeyValuePair{ + { + Key: common.DimKey, + Value: strconv.Itoa(dim), + }, + }, + IndexParams: nil, + AutoID: false, + } + j := &schemapb.FieldSchema{ + Name: jsonField, + Description: "json field", + DataType: schemapb.DataType_JSON, + } + return &schemapb.CollectionSchema{ + Name: collectionName, + Description: "", + AutoID: false, + EnableDynamicField: true, + Fields: []*schemapb.FieldSchema{ + pk, + fVec, + j, + }, + } + } + schema := constructCollectionSchema() + marshaledSchema, err := proto.Marshal(schema) + s.NoError(err) + + createCollectionStatus, err := c.proxy.CreateCollection(ctx, &milvuspb.CreateCollectionRequest{ + DbName: dbName, + CollectionName: collectionName, + Schema: marshaledSchema, + ShardsNum: 2, + }) + s.NoError(err) + if createCollectionStatus.GetErrorCode() != commonpb.ErrorCode_Success { + log.Warn("createCollectionStatus fail reason", zap.String("reason", createCollectionStatus.GetReason())) + } + s.Equal(createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) + + log.Info("CreateCollection result", zap.Any("createCollectionStatus", createCollectionStatus)) + showCollectionsResp, err := c.proxy.ShowCollections(ctx, &milvuspb.ShowCollectionsRequest{}) + s.NoError(err) + s.Equal(showCollectionsResp.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) + log.Info("ShowCollections result", zap.Any("showCollectionsResp", showCollectionsResp)) + + describeCollectionResp, err := c.proxy.DescribeCollection(ctx, &milvuspb.DescribeCollectionRequest{CollectionName: collectionName}) + s.NoError(err) + s.True(describeCollectionResp.Schema.EnableDynamicField) + s.Equal(4, len(describeCollectionResp.GetSchema().GetFields())) + + fVecColumn := newFloatVectorFieldData(floatVecField, rowNum, dim) + jsonData := newJSONData(jsonField, rowNum) + dynamicData := newJSONData(common.MetaFieldName, rowNum) + s.insertFlushIndexLoad(ctx, c, dbName, collectionName, rowNum, dim, []*schemapb.FieldData{fVecColumn, jsonData, dynamicData}) + + s.checkSearch(c, collectionName, common.MetaFieldName, dim) + + expr := "" + // search + expr = `jsonField["A"] < 10` + checkFunc := func(result *milvuspb.SearchResults) { + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(jsonField, 
result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(5, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + } + s.doSearch(c, collectionName, []string{jsonField}, expr, dim, checkFunc) + log.Info("LT expression run successfully") + + expr = `jsonField["A"] <= 5` + checkFunc = func(result *milvuspb.SearchResults) { + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(jsonField, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(3, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + } + s.doSearch(c, collectionName, []string{jsonField}, expr, dim, checkFunc) + log.Info("LE expression run successfully") + + expr = `jsonField["A"] == 5` + checkFunc = func(result *milvuspb.SearchResults) { + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(jsonField, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(1, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + } + s.doSearch(c, collectionName, []string{jsonField}, expr, dim, checkFunc) + log.Info("EQ expression run successfully") + + expr = `jsonField["C"][0] in [90, 91, 95, 97]` + checkFunc = func(result *milvuspb.SearchResults) { + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(jsonField, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(4, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + } + s.doSearch(c, collectionName, []string{jsonField}, expr, dim, checkFunc) + log.Info("IN expression run successfully") + + expr = `jsonField["C"][0] not in [90, 91, 95, 97]` + checkFunc = func(result *milvuspb.SearchResults) { + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(jsonField, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + } + s.doSearch(c, collectionName, []string{jsonField}, expr, dim, checkFunc) + log.Info("NIN expression run successfully") + + expr = `jsonField["E"]["G"] > 100` + checkFunc = func(result *milvuspb.SearchResults) { + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(jsonField, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(9, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + } + s.doSearch(c, collectionName, []string{jsonField}, expr, dim, checkFunc) + log.Info("nested path expression run successfully") + + expr = `jsonField == ""` + s.doSearchWithInvalidExpr(c, collectionName, []string{jsonField}, expr, dim) +} + +func (s *JSONExprSuite) checkSearch(c *MiniCluster, collectionName, fieldName string, dim int) { + expr := "" + // search + expr = `$meta["A"] > 90` + checkFunc := func(result *milvuspb.SearchResults) { + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(5, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + } + s.doSearch(c, collectionName, []string{"A"}, expr, dim, checkFunc) log.Info("GT expression run successfully") expr = `$meta["A"] < 10` checkFunc = func(result 
*milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 5, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(5, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{"B"}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{"B"}, expr, dim, checkFunc) log.Info("LT expression run successfully") expr = `$meta["A"] <= 5` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 3, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(3, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{"C"}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{"C"}, expr, dim, checkFunc) log.Info("LE expression run successfully") expr = `A >= 95` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 3, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(3, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("GE expression run successfully") expr = `$meta["A"] == 5` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 1, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(1, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("EQ expression run successfully") expr = `A != 95` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, 
result.Results.FieldsData[0].GetType()) - assert.Equal(t, 10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("NE expression run successfully") expr = `not (A != 95)` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 1, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(1, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("NOT NE expression run successfully") expr = `A > 90 && B < 5` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 2, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(2, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("NE expression run successfully") expr = `A > 95 || 5 > B` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 4, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(4, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("NE expression run successfully") expr = `not (A == 95)` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, 
len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("NOT expression run successfully") expr = `A in [90, 91, 95, 97]` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 3, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(3, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("IN expression run successfully") expr = `A not in [90, 91, 95, 97]` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("NIN expression run successfully") expr = `C[0] in [90, 91, 95, 97]` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 4, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(4, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("IN expression run successfully") expr = `C[0] not in [90, 91, 95, 97]` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, 
result.Results.FieldsData[0].GetType()) + s.Equal(10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("NIN expression run successfully") expr = `0 <= A < 5` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 2, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(2, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("BinaryRange expression run successfully") expr = `100 > A >= 90` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 5, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(5, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("BinaryRange expression run successfully") expr = `1+5 <= A < 5+10` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 4, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(4, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("BinaryRange expression run successfully") expr = `A + 5 == 10` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 1, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(1, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, 
[]string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("Arithmetic expression run successfully") expr = `exists A` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("EXISTS expression run successfully") expr = `exists AAA` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 0, len(result.Results.FieldsData)) + s.Equal(0, len(result.Results.FieldsData)) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("EXISTS expression run successfully") expr = `not exists A` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("NOT EXISTS expression run successfully") expr = `E["G"] > 100` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 9, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(9, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("nested path expression run successfully") expr = `D like "name-%"` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, 
result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("like expression run successfully") expr = `D like "name-11"` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 1, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(1, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("like expression run successfully") expr = `A like "10"` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 0, len(result.Results.FieldsData)) + s.Equal(0, len(result.Results.FieldsData)) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("like expression run successfully") expr = `A in []` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 0, len(result.Results.FieldsData)) + s.Equal(0, len(result.Results.FieldsData)) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("term empty expression run successfully") expr = `A not in []` checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, fieldName, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) + s.Equal(1, len(result.Results.FieldsData)) + s.Equal(fieldName, result.Results.FieldsData[0].GetFieldName()) + s.Equal(schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) + s.Equal(10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) } - doSearch(c, collectionName, []string{fieldName}, expr, dim, t, checkFunc) + s.doSearch(c, collectionName, []string{fieldName}, expr, dim, checkFunc) log.Info("term empty expression run successfully") // invalid expr expr = `E[F] > 100` - doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim, t) + s.doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim) expr = `A >> 10` - doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim, t) + s.doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim) expr = `not A > 5` - doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim, t) + s.doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim) expr = `not A == 5` - doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim, t) + s.doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, 
expr, dim) expr = `A > B` - doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim, t) + s.doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim) expr = `A > Int64Field` - doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim, t) + s.doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim) expr = `A like abc` - doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim, t) + s.doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim) expr = `D like "%name-%"` - doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim, t) + s.doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim) expr = `D like "na%me"` - doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim, t) + s.doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim) expr = `1+5 <= A+1 < 5+10` - doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim, t) + s.doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim) expr = `$meta == ""` - doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim, t) + s.doSearchWithInvalidExpr(c, collectionName, []string{fieldName}, expr, dim) } -func insertFlushIndexLoad(ctx context.Context, t *testing.T, c *MiniCluster, dbName, collectionName string, rowNum int, dim int, fieldData []*schemapb.FieldData) { +func (s *JSONExprSuite) insertFlushIndexLoad(ctx context.Context, c *MiniCluster, dbName, collectionName string, rowNum int, dim int, fieldData []*schemapb.FieldData) { hashKeys := generateHashKeys(rowNum) insertResult, err := c.proxy.Insert(ctx, &milvuspb.InsertRequest{ DbName: dbName, @@ -432,22 +673,22 @@ func insertFlushIndexLoad(ctx context.Context, t *testing.T, c *MiniCluster, dbN HashKeys: hashKeys, NumRows: uint32(rowNum), }) - assert.NoError(t, err) - assert.Equal(t, insertResult.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) + s.NoError(err) + s.Equal(insertResult.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) // flush flushResp, err := c.proxy.Flush(ctx, &milvuspb.FlushRequest{ DbName: dbName, CollectionNames: []string{collectionName}, }) - assert.NoError(t, err) + s.NoError(err) segmentIDs, has := flushResp.GetCollSegIDs()[collectionName] ids := segmentIDs.GetData() - assert.NotEmpty(t, segmentIDs) + s.NotEmpty(segmentIDs) segments, err := c.metaWatcher.ShowSegments() - assert.NoError(t, err) - assert.NotEmpty(t, segments) + s.NoError(err) + s.NotEmpty(segments) for _, segment := range segments { log.Info("ShowSegments result", zap.String("segment", segment.String())) } @@ -501,20 +742,20 @@ func insertFlushIndexLoad(ctx context.Context, t *testing.T, c *MiniCluster, dbN if createIndexStatus.GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("createIndexStatus fail reason", zap.String("reason", createIndexStatus.GetReason())) } - assert.NoError(t, err) - assert.Equal(t, commonpb.ErrorCode_Success, createIndexStatus.GetErrorCode()) - waitingForIndexBuilt(ctx, c, t, collectionName, floatVecField) + s.NoError(err) + s.Equal(commonpb.ErrorCode_Success, createIndexStatus.GetErrorCode()) + waitingForIndexBuilt(ctx, c, s.T(), collectionName, floatVecField) // load loadStatus, err := c.proxy.LoadCollection(ctx, &milvuspb.LoadCollectionRequest{ DbName: dbName, CollectionName: collectionName, }) - assert.NoError(t, err) + s.NoError(err) if loadStatus.GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("loadStatus fail reason", 
zap.String("reason", loadStatus.GetReason())) } - assert.Equal(t, commonpb.ErrorCode_Success, loadStatus.GetErrorCode()) + s.Equal(commonpb.ErrorCode_Success, loadStatus.GetErrorCode()) for { loadProgress, err := c.proxy.GetLoadingProgress(ctx, &milvuspb.GetLoadingProgressRequest{ CollectionName: collectionName, @@ -529,7 +770,7 @@ func insertFlushIndexLoad(ctx context.Context, t *testing.T, c *MiniCluster, dbN } } -func doSearch(cluster *MiniCluster, collectionName string, outputField []string, expr string, dim int, t *testing.T, checkFunc func(results *milvuspb.SearchResults)) { +func (s *JSONExprSuite) doSearch(cluster *MiniCluster, collectionName string, outputField []string, expr string, dim int, checkFunc func(results *milvuspb.SearchResults)) { nq := 1 topk := 10 roundDecimal := -1 @@ -543,8 +784,8 @@ func doSearch(cluster *MiniCluster, collectionName string, outputField []string, if searchResult.GetStatus().GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("searchResult fail reason", zap.String("reason", searchResult.GetStatus().GetReason())) } - assert.NoError(t, err) - assert.Equal(t, commonpb.ErrorCode_Success, searchResult.GetStatus().GetErrorCode()) + s.NoError(err) + s.Equal(commonpb.ErrorCode_Success, searchResult.GetStatus().GetErrorCode()) log.Info("TestHelloMilvus succeed", zap.Any("result", searchResult.Results), zap.Any("result num", len(searchResult.Results.FieldsData))) @@ -602,7 +843,7 @@ func newJSONData(fieldName string, rowNum int) *schemapb.FieldData { } } -func doSearchWithInvalidExpr(cluster *MiniCluster, collectionName string, outputField []string, expr string, dim int, t *testing.T) { +func (s *JSONExprSuite) doSearchWithInvalidExpr(cluster *MiniCluster, collectionName string, outputField []string, expr string, dim int) { nq := 1 topk := 10 roundDecimal := -1 @@ -616,256 +857,10 @@ func doSearchWithInvalidExpr(cluster *MiniCluster, collectionName string, output if searchResult.GetStatus().GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("searchResult fail reason", zap.String("reason", searchResult.GetStatus().GetReason())) } - assert.NoError(t, err) - assert.NotEqual(t, commonpb.ErrorCode_Success, searchResult.GetStatus().GetErrorCode()) + s.NoError(err) + s.NotEqual(commonpb.ErrorCode_Success, searchResult.GetStatus().GetErrorCode()) } -func TestJSON_InsertWithoutDynamicData(t *testing.T) { - ctx := context.Background() - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - defer c.Stop() - assert.NoError(t, err) - - prefix := "TestHelloMilvus" - dbName := "" - collectionName := prefix + funcutil.GenRandomStr() - dim := 128 - rowNum := 100 - - constructCollectionSchema := func() *schemapb.CollectionSchema { - pk := &schemapb.FieldSchema{ - FieldID: 0, - Name: int64Field, - IsPrimaryKey: true, - Description: "", - DataType: schemapb.DataType_Int64, - TypeParams: nil, - IndexParams: nil, - AutoID: true, - } - fVec := &schemapb.FieldSchema{ - FieldID: 0, - Name: floatVecField, - IsPrimaryKey: false, - Description: "", - DataType: schemapb.DataType_FloatVector, - TypeParams: []*commonpb.KeyValuePair{ - { - Key: common.DimKey, - Value: strconv.Itoa(dim), - }, - }, - IndexParams: nil, - AutoID: false, - } - return &schemapb.CollectionSchema{ - Name: collectionName, - Description: "", - AutoID: false, - EnableDynamicField: true, - Fields: []*schemapb.FieldSchema{ - pk, - fVec, - }, - } - } - schema := constructCollectionSchema() - marshaledSchema, err := proto.Marshal(schema) - assert.NoError(t, err) - - 
createCollectionStatus, err := c.proxy.CreateCollection(ctx, &milvuspb.CreateCollectionRequest{ - DbName: dbName, - CollectionName: collectionName, - Schema: marshaledSchema, - ShardsNum: 2, - }) - assert.NoError(t, err) - if createCollectionStatus.GetErrorCode() != commonpb.ErrorCode_Success { - log.Warn("createCollectionStatus fail reason", zap.String("reason", createCollectionStatus.GetReason())) - } - assert.Equal(t, createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) - - log.Info("CreateCollection result", zap.Any("createCollectionStatus", createCollectionStatus)) - showCollectionsResp, err := c.proxy.ShowCollections(ctx, &milvuspb.ShowCollectionsRequest{}) - assert.NoError(t, err) - assert.Equal(t, showCollectionsResp.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) - log.Info("ShowCollections result", zap.Any("showCollectionsResp", showCollectionsResp)) - - describeCollectionResp, err := c.proxy.DescribeCollection(ctx, &milvuspb.DescribeCollectionRequest{CollectionName: collectionName}) - assert.NoError(t, err) - assert.True(t, describeCollectionResp.Schema.EnableDynamicField) - assert.Equal(t, 3, len(describeCollectionResp.GetSchema().GetFields())) - - fVecColumn := newFloatVectorFieldData(floatVecField, rowNum, dim) - insertFlushIndexLoad(ctx, t, c, dbName, collectionName, rowNum, dim, []*schemapb.FieldData{fVecColumn}) - - expr := "" - // search - expr = `$meta["A"] > 90` - checkFunc := func(result *milvuspb.SearchResults) { - assert.Equal(t, 0, len(result.Results.FieldsData)) - } - doSearch(c, collectionName, []string{common.MetaFieldName}, expr, dim, t, checkFunc) - log.Info("GT expression run successfully") -} - -func TestJSON_DynamicSchemaWithJSON(t *testing.T) { - ctx := context.Background() - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - defer c.Stop() - assert.NoError(t, err) - - prefix := "TestHelloMilvus" - dbName := "" - collectionName := prefix + funcutil.GenRandomStr() - dim := 128 - rowNum := 100 - - constructCollectionSchema := func() *schemapb.CollectionSchema { - pk := &schemapb.FieldSchema{ - FieldID: 0, - Name: int64Field, - IsPrimaryKey: true, - Description: "", - DataType: schemapb.DataType_Int64, - TypeParams: nil, - IndexParams: nil, - AutoID: true, - } - fVec := &schemapb.FieldSchema{ - FieldID: 0, - Name: floatVecField, - IsPrimaryKey: false, - Description: "", - DataType: schemapb.DataType_FloatVector, - TypeParams: []*commonpb.KeyValuePair{ - { - Key: common.DimKey, - Value: strconv.Itoa(dim), - }, - }, - IndexParams: nil, - AutoID: false, - } - j := &schemapb.FieldSchema{ - Name: jsonField, - Description: "json field", - DataType: schemapb.DataType_JSON, - } - return &schemapb.CollectionSchema{ - Name: collectionName, - Description: "", - AutoID: false, - EnableDynamicField: true, - Fields: []*schemapb.FieldSchema{ - pk, - fVec, - j, - }, - } - } - schema := constructCollectionSchema() - marshaledSchema, err := proto.Marshal(schema) - assert.NoError(t, err) - - createCollectionStatus, err := c.proxy.CreateCollection(ctx, &milvuspb.CreateCollectionRequest{ - DbName: dbName, - CollectionName: collectionName, - Schema: marshaledSchema, - ShardsNum: 2, - }) - assert.NoError(t, err) - if createCollectionStatus.GetErrorCode() != commonpb.ErrorCode_Success { - log.Warn("createCollectionStatus fail reason", zap.String("reason", createCollectionStatus.GetReason())) - } - assert.Equal(t, createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) - - log.Info("CreateCollection 
result", zap.Any("createCollectionStatus", createCollectionStatus)) - showCollectionsResp, err := c.proxy.ShowCollections(ctx, &milvuspb.ShowCollectionsRequest{}) - assert.NoError(t, err) - assert.Equal(t, showCollectionsResp.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) - log.Info("ShowCollections result", zap.Any("showCollectionsResp", showCollectionsResp)) - - describeCollectionResp, err := c.proxy.DescribeCollection(ctx, &milvuspb.DescribeCollectionRequest{CollectionName: collectionName}) - assert.NoError(t, err) - assert.True(t, describeCollectionResp.Schema.EnableDynamicField) - assert.Equal(t, 4, len(describeCollectionResp.GetSchema().GetFields())) - - fVecColumn := newFloatVectorFieldData(floatVecField, rowNum, dim) - jsonData := newJSONData(jsonField, rowNum) - dynamicData := newJSONData(common.MetaFieldName, rowNum) - insertFlushIndexLoad(ctx, t, c, dbName, collectionName, rowNum, dim, []*schemapb.FieldData{fVecColumn, jsonData, dynamicData}) - - checkSearch(t, c, collectionName, common.MetaFieldName, dim) - - expr := "" - // search - expr = `jsonField["A"] < 10` - checkFunc := func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, jsonField, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 5, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) - } - doSearch(c, collectionName, []string{jsonField}, expr, dim, t, checkFunc) - log.Info("LT expression run successfully") - - expr = `jsonField["A"] <= 5` - checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, jsonField, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 3, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) - } - doSearch(c, collectionName, []string{jsonField}, expr, dim, t, checkFunc) - log.Info("LE expression run successfully") - - expr = `jsonField["A"] == 5` - checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, jsonField, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 1, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) - } - doSearch(c, collectionName, []string{jsonField}, expr, dim, t, checkFunc) - log.Info("EQ expression run successfully") - - expr = `jsonField["C"][0] in [90, 91, 95, 97]` - checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, jsonField, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 4, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) - } - doSearch(c, collectionName, []string{jsonField}, expr, dim, t, checkFunc) - log.Info("IN expression run successfully") - - expr = `jsonField["C"][0] not in [90, 91, 95, 97]` - checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, jsonField, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 10, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) - } - 
doSearch(c, collectionName, []string{jsonField}, expr, dim, t, checkFunc) - log.Info("NIN expression run successfully") - - expr = `jsonField["E"]["G"] > 100` - checkFunc = func(result *milvuspb.SearchResults) { - assert.Equal(t, 1, len(result.Results.FieldsData)) - assert.Equal(t, jsonField, result.Results.FieldsData[0].GetFieldName()) - assert.Equal(t, schemapb.DataType_JSON, result.Results.FieldsData[0].GetType()) - assert.Equal(t, 9, len(result.Results.FieldsData[0].GetScalars().GetJsonData().GetData())) - } - doSearch(c, collectionName, []string{jsonField}, expr, dim, t, checkFunc) - log.Info("nested path expression run successfully") - - expr = `jsonField == ""` - doSearchWithInvalidExpr(c, collectionName, []string{jsonField}, expr, dim, t) +func TestJsonExpr(t *testing.T) { + suite.Run(t, new(JSONExprSuite)) } diff --git a/tests/integration/meta_watcher_test.go b/tests/integration/meta_watcher_test.go index 8c6cfc2b1d..44e62e7842 100644 --- a/tests/integration/meta_watcher_test.go +++ b/tests/integration/meta_watcher_test.go @@ -24,7 +24,7 @@ import ( "github.com/cockroachdb/errors" "github.com/golang/protobuf/proto" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/commonpb" @@ -36,32 +36,23 @@ import ( "github.com/milvus-io/milvus/pkg/util/funcutil" ) -func TestShowSessions(t *testing.T) { - ctx := context.Background() - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - defer c.Stop() - assert.NoError(t, err) +type MetaWatcherSuite struct { + MiniClusterSuite +} - sessions, err := c.metaWatcher.ShowSessions() - assert.NoError(t, err) - assert.NotEmpty(t, sessions) +func (s *MetaWatcherSuite) TestShowSessions() { + sessions, err := s.Cluster.metaWatcher.ShowSessions() + s.NoError(err) + s.NotEmpty(sessions) for _, session := range sessions { log.Info("ShowSessions result", zap.String("session", session.String())) } - } -func TestShowSegments(t *testing.T) { - ctx := context.Background() - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - defer c.Stop() - assert.NoError(t, err) +func (s *MetaWatcherSuite) TestShowSegments() { + c := s.Cluster + ctx, cancel := context.WithCancel(c.GetContext()) + defer cancel() prefix := "TestShowSegments" dbName := "" @@ -109,7 +100,7 @@ func TestShowSegments(t *testing.T) { } schema := constructCollectionSchema() marshaledSchema, err := proto.Marshal(schema) - assert.NoError(t, err) + s.NoError(err) createCollectionStatus, err := c.proxy.CreateCollection(ctx, &milvuspb.CreateCollectionRequest{ DbName: dbName, @@ -117,13 +108,13 @@ func TestShowSegments(t *testing.T) { Schema: marshaledSchema, ShardsNum: common.DefaultShardsNum, }) - assert.NoError(t, err) - assert.Equal(t, createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) + s.NoError(err) + s.Equal(createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) log.Info("CreateCollection result", zap.Any("createCollectionStatus", createCollectionStatus)) showCollectionsResp, err := c.proxy.ShowCollections(ctx, &milvuspb.ShowCollectionsRequest{}) - assert.NoError(t, err) - assert.Equal(t, showCollectionsResp.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) + s.NoError(err) + s.Equal(showCollectionsResp.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) log.Info("ShowCollections result", zap.Any("showCollectionsResp", showCollectionsResp)) fVecColumn := 
newFloatVectorFieldData(floatVecField, rowNum, dim) @@ -135,25 +126,21 @@ func TestShowSegments(t *testing.T) { HashKeys: hashKeys, NumRows: uint32(rowNum), }) - assert.NoError(t, err) - assert.Equal(t, insertResult.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) + s.NoError(err) + s.Equal(insertResult.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) segments, err := c.metaWatcher.ShowSegments() - assert.NoError(t, err) - assert.NotEmpty(t, segments) + s.NoError(err) + s.NotEmpty(segments) for _, segment := range segments { log.Info("ShowSegments result", zap.String("segment", segment.String())) } } -func TestShowReplicas(t *testing.T) { - ctx := context.Background() - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - defer c.Stop() - assert.NoError(t, err) +func (s *MetaWatcherSuite) TestShowReplicas() { + c := s.Cluster + ctx, cancel := context.WithCancel(c.GetContext()) + defer cancel() prefix := "TestShowReplicas" dbName := "" @@ -201,7 +188,7 @@ func TestShowReplicas(t *testing.T) { } schema := constructCollectionSchema() marshaledSchema, err := proto.Marshal(schema) - assert.NoError(t, err) + s.NoError(err) createCollectionStatus, err := c.proxy.CreateCollection(ctx, &milvuspb.CreateCollectionRequest{ DbName: dbName, @@ -209,16 +196,16 @@ func TestShowReplicas(t *testing.T) { Schema: marshaledSchema, ShardsNum: common.DefaultShardsNum, }) - assert.NoError(t, err) + s.NoError(err) if createCollectionStatus.GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("createCollectionStatus fail reason", zap.String("reason", createCollectionStatus.GetReason())) } - assert.Equal(t, createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) + s.Equal(createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) log.Info("CreateCollection result", zap.Any("createCollectionStatus", createCollectionStatus)) showCollectionsResp, err := c.proxy.ShowCollections(ctx, &milvuspb.ShowCollectionsRequest{}) - assert.NoError(t, err) - assert.Equal(t, showCollectionsResp.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) + s.NoError(err) + s.Equal(showCollectionsResp.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) log.Info("ShowCollections result", zap.Any("showCollectionsResp", showCollectionsResp)) fVecColumn := newFloatVectorFieldData(floatVecField, rowNum, dim) @@ -230,22 +217,22 @@ func TestShowReplicas(t *testing.T) { HashKeys: hashKeys, NumRows: uint32(rowNum), }) - assert.NoError(t, err) - assert.Equal(t, insertResult.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) + s.NoError(err) + s.Equal(insertResult.GetStatus().GetErrorCode(), commonpb.ErrorCode_Success) // flush flushResp, err := c.proxy.Flush(ctx, &milvuspb.FlushRequest{ DbName: dbName, CollectionNames: []string{collectionName}, }) - assert.NoError(t, err) + s.NoError(err) segmentIDs, has := flushResp.GetCollSegIDs()[collectionName] ids := segmentIDs.GetData() - assert.NotEmpty(t, segmentIDs) + s.NotEmpty(segmentIDs) segments, err := c.metaWatcher.ShowSegments() - assert.NoError(t, err) - assert.NotEmpty(t, segments) + s.NoError(err) + s.NotEmpty(segments) for _, segment := range segments { log.Info("ShowSegments result", zap.String("segment", segment.String())) } @@ -296,24 +283,24 @@ func TestShowReplicas(t *testing.T) { }, }, }) - assert.NoError(t, err) + s.NoError(err) if createIndexStatus.GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("createIndexStatus fail reason", zap.String("reason", createIndexStatus.GetReason())) } - 
assert.Equal(t, commonpb.ErrorCode_Success, createIndexStatus.GetErrorCode()) + s.Equal(commonpb.ErrorCode_Success, createIndexStatus.GetErrorCode()) - waitingForIndexBuilt(ctx, c, t, collectionName, floatVecField) + waitingForIndexBuilt(ctx, c, s.T(), collectionName, floatVecField) // load loadStatus, err := c.proxy.LoadCollection(ctx, &milvuspb.LoadCollectionRequest{ DbName: dbName, CollectionName: collectionName, }) - assert.NoError(t, err) + s.NoError(err) if loadStatus.GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("loadStatus fail reason", zap.String("reason", loadStatus.GetReason())) } - assert.Equal(t, commonpb.ErrorCode_Success, loadStatus.GetErrorCode()) + s.Equal(commonpb.ErrorCode_Success, loadStatus.GetErrorCode()) for { loadProgress, err := c.proxy.GetLoadingProgress(ctx, &milvuspb.GetLoadingProgressRequest{ CollectionName: collectionName, @@ -328,11 +315,15 @@ func TestShowReplicas(t *testing.T) { } replicas, err := c.metaWatcher.ShowReplicas() - assert.NoError(t, err) - assert.NotEmpty(t, replicas) + s.NoError(err) + s.NotEmpty(replicas) for _, replica := range replicas { log.Info("ShowReplicas result", zap.String("replica", PrettyReplica(replica))) } log.Info("TestShowReplicas succeed") } + +func TestMetaWatcher(t *testing.T) { + suite.Run(t, new(MetaWatcherSuite)) +} diff --git a/tests/integration/minicluster.go b/tests/integration/minicluster.go index a6b22920df..9059add2f8 100644 --- a/tests/integration/minicluster.go +++ b/tests/integration/minicluster.go @@ -310,6 +310,10 @@ func StartMiniCluster(ctx context.Context, opts ...Option) (cluster *MiniCluster return cluster, nil } +func (cluster *MiniCluster) GetContext() context.Context { + return cluster.ctx +} + func (cluster *MiniCluster) Start() error { log.Info("mini cluster start") err := cluster.rootCoord.Init() diff --git a/tests/integration/minicluster_test.go b/tests/integration/minicluster_test.go index 89d96cd8fc..3ec04850ca 100644 --- a/tests/integration/minicluster_test.go +++ b/tests/integration/minicluster_test.go @@ -20,183 +20,170 @@ import ( "context" "testing" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" "github.com/milvus-io/milvus/internal/datanode" "github.com/milvus-io/milvus/internal/indexnode" "github.com/milvus-io/milvus/internal/querynodev2" ) -func TestMiniClusterStartAndStop(t *testing.T) { - ctx := context.Background() - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - err = c.Stop() - assert.NoError(t, err) +type MiniClusterMethodsSuite struct { + MiniClusterSuite } -func TestAddRemoveDataNode(t *testing.T) { - ctx := context.Background() - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - defer c.Stop() - assert.NoError(t, err) +func (s *MiniClusterMethodsSuite) TestStartAndStop() { + //Do nothing +} + +func (s *MiniClusterMethodsSuite) TestRemoveDataNode() { + c := s.Cluster + ctx, cancel := context.WithCancel(c.GetContext()) + defer cancel() datanode := datanode.NewDataNode(ctx, c.factory) datanode.SetEtcdClient(c.etcdCli) //datanode := c.CreateDefaultDataNode() - err = c.AddDataNode(datanode) - assert.NoError(t, err) + err := c.AddDataNode(datanode) + s.NoError(err) - assert.Equal(t, 2, c.clusterConfig.DataNodeNum) - assert.Equal(t, 2, len(c.dataNodes)) + s.Equal(2, c.clusterConfig.DataNodeNum) + s.Equal(2, len(c.dataNodes)) err = c.RemoveDataNode(datanode) - assert.NoError(t, err) + s.NoError(err) - assert.Equal(t, 1, 
c.clusterConfig.DataNodeNum) - assert.Equal(t, 1, len(c.dataNodes)) + s.Equal(1, c.clusterConfig.DataNodeNum) + s.Equal(1, len(c.dataNodes)) // add default node and remove randomly err = c.AddDataNode(nil) - assert.NoError(t, err) + s.NoError(err) - assert.Equal(t, 2, c.clusterConfig.DataNodeNum) - assert.Equal(t, 2, len(c.dataNodes)) + s.Equal(2, c.clusterConfig.DataNodeNum) + s.Equal(2, len(c.dataNodes)) err = c.RemoveDataNode(nil) - assert.NoError(t, err) + s.NoError(err) - assert.Equal(t, 1, c.clusterConfig.DataNodeNum) - assert.Equal(t, 1, len(c.dataNodes)) + s.Equal(1, c.clusterConfig.DataNodeNum) + s.Equal(1, len(c.dataNodes)) } -func TestAddRemoveQueryNode(t *testing.T) { - ctx := context.Background() - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - defer c.Stop() - assert.NoError(t, err) +func (s *MiniClusterMethodsSuite) TestRemoveQueryNode() { + c := s.Cluster + ctx, cancel := context.WithCancel(c.GetContext()) + defer cancel() queryNode := querynodev2.NewQueryNode(ctx, c.factory) queryNode.SetEtcdClient(c.etcdCli) //queryNode := c.CreateDefaultQueryNode() - err = c.AddQueryNode(queryNode) - assert.NoError(t, err) + err := c.AddQueryNode(queryNode) + s.NoError(err) - assert.Equal(t, 2, c.clusterConfig.QueryNodeNum) - assert.Equal(t, 2, len(c.queryNodes)) + s.Equal(2, c.clusterConfig.QueryNodeNum) + s.Equal(2, len(c.queryNodes)) err = c.RemoveQueryNode(queryNode) - assert.NoError(t, err) + s.NoError(err) - assert.Equal(t, 1, c.clusterConfig.QueryNodeNum) - assert.Equal(t, 1, len(c.queryNodes)) + s.Equal(1, c.clusterConfig.QueryNodeNum) + s.Equal(1, len(c.queryNodes)) // add default node and remove randomly err = c.AddQueryNode(nil) - assert.NoError(t, err) + s.NoError(err) - assert.Equal(t, 2, c.clusterConfig.QueryNodeNum) - assert.Equal(t, 2, len(c.queryNodes)) + s.Equal(2, c.clusterConfig.QueryNodeNum) + s.Equal(2, len(c.queryNodes)) err = c.RemoveQueryNode(nil) - assert.NoError(t, err) + s.NoError(err) + + s.Equal(1, c.clusterConfig.QueryNodeNum) + s.Equal(1, len(c.queryNodes)) - assert.Equal(t, 1, c.clusterConfig.QueryNodeNum) - assert.Equal(t, 1, len(c.queryNodes)) } -func TestAddRemoveIndexNode(t *testing.T) { - ctx := context.Background() - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - defer c.Stop() - assert.NoError(t, err) +func (s *MiniClusterMethodsSuite) TestRemoveIndexNode() { + c := s.Cluster + ctx, cancel := context.WithCancel(c.GetContext()) + defer cancel() indexNode := indexnode.NewIndexNode(ctx, c.factory) indexNode.SetEtcdClient(c.etcdCli) //indexNode := c.CreateDefaultIndexNode() - err = c.AddIndexNode(indexNode) - assert.NoError(t, err) + err := c.AddIndexNode(indexNode) + s.NoError(err) - assert.Equal(t, 2, c.clusterConfig.IndexNodeNum) - assert.Equal(t, 2, len(c.indexNodes)) + s.Equal(2, c.clusterConfig.IndexNodeNum) + s.Equal(2, len(c.indexNodes)) err = c.RemoveIndexNode(indexNode) - assert.NoError(t, err) + s.NoError(err) - assert.Equal(t, 1, c.clusterConfig.IndexNodeNum) - assert.Equal(t, 1, len(c.indexNodes)) + s.Equal(1, c.clusterConfig.IndexNodeNum) + s.Equal(1, len(c.indexNodes)) // add default node and remove randomly err = c.AddIndexNode(nil) - assert.NoError(t, err) + s.NoError(err) - assert.Equal(t, 2, c.clusterConfig.IndexNodeNum) - assert.Equal(t, 2, len(c.indexNodes)) + s.Equal(2, c.clusterConfig.IndexNodeNum) + s.Equal(2, len(c.indexNodes)) err = c.RemoveIndexNode(nil) - assert.NoError(t, err) + s.NoError(err) + + s.Equal(1, 
c.clusterConfig.IndexNodeNum) + s.Equal(1, len(c.indexNodes)) - assert.Equal(t, 1, c.clusterConfig.IndexNodeNum) - assert.Equal(t, 1, len(c.indexNodes)) } -func TestUpdateClusterSize(t *testing.T) { - ctx := context.Background() - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - defer c.Stop() - assert.NoError(t, err) +func (s *MiniClusterMethodsSuite) TestUpdateClusterSize() { - err = c.UpdateClusterSize(ClusterConfig{ + c := s.Cluster + + err := c.UpdateClusterSize(ClusterConfig{ QueryNodeNum: -1, DataNodeNum: -1, IndexNodeNum: -1, }) - assert.Error(t, err) + s.Error(err) err = c.UpdateClusterSize(ClusterConfig{ QueryNodeNum: 2, DataNodeNum: 2, IndexNodeNum: 2, }) - assert.NoError(t, err) + s.NoError(err) - assert.Equal(t, 2, c.clusterConfig.DataNodeNum) - assert.Equal(t, 2, c.clusterConfig.QueryNodeNum) - assert.Equal(t, 2, c.clusterConfig.IndexNodeNum) + s.Equal(2, c.clusterConfig.DataNodeNum) + s.Equal(2, c.clusterConfig.QueryNodeNum) + s.Equal(2, c.clusterConfig.IndexNodeNum) - assert.Equal(t, 2, len(c.dataNodes)) - assert.Equal(t, 2, len(c.queryNodes)) - assert.Equal(t, 2, len(c.indexNodes)) + s.Equal(2, len(c.dataNodes)) + s.Equal(2, len(c.queryNodes)) + s.Equal(2, len(c.indexNodes)) err = c.UpdateClusterSize(ClusterConfig{ DataNodeNum: 3, QueryNodeNum: 2, IndexNodeNum: 1, }) - assert.NoError(t, err) + s.NoError(err) - assert.Equal(t, 3, c.clusterConfig.DataNodeNum) - assert.Equal(t, 2, c.clusterConfig.QueryNodeNum) - assert.Equal(t, 1, c.clusterConfig.IndexNodeNum) + s.Equal(3, c.clusterConfig.DataNodeNum) + s.Equal(2, c.clusterConfig.QueryNodeNum) + s.Equal(1, c.clusterConfig.IndexNodeNum) - assert.Equal(t, 3, len(c.dataNodes)) - assert.Equal(t, 2, len(c.queryNodes)) - assert.Equal(t, 1, len(c.indexNodes)) + s.Equal(3, len(c.dataNodes)) + s.Equal(2, len(c.queryNodes)) + s.Equal(1, len(c.indexNodes)) +} + +func TestMiniCluster(t *testing.T) { + suite.Run(t, new(MiniClusterMethodsSuite)) } diff --git a/tests/integration/range_search_test.go b/tests/integration/range_search_test.go index cbdcfd3867..13cc962f01 100644 --- a/tests/integration/range_search_test.go +++ b/tests/integration/range_search_test.go @@ -20,10 +20,9 @@ import ( "context" "fmt" "testing" - "time" "github.com/golang/protobuf/proto" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/milvuspb" @@ -35,17 +34,14 @@ import ( "github.com/milvus-io/milvus/pkg/util/merr" ) -func TestRangeSearchIP(t *testing.T) { - ctx, cancel := context.WithTimeout(context.Background(), time.Second*180) - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - defer func() { - err = c.Stop() - assert.NoError(t, err) - cancel() - }() +type RangeSearchSuite struct { + MiniClusterSuite +} + +func (s *RangeSearchSuite) TestRangeSearchIP() { + c := s.Cluster + ctx, cancel := context.WithCancel(c.GetContext()) + defer cancel() prefix := "TestRangeSearchIP" dbName := "" @@ -55,7 +51,7 @@ func TestRangeSearchIP(t *testing.T) { schema := constructSchema(collectionName, dim, true) marshaledSchema, err := proto.Marshal(schema) - assert.NoError(t, err) + s.NoError(err) createCollectionStatus, err := c.proxy.CreateCollection(ctx, &milvuspb.CreateCollectionRequest{ DbName: dbName, @@ -63,7 +59,7 @@ func TestRangeSearchIP(t *testing.T) { Schema: marshaledSchema, ShardsNum: common.DefaultShardsNum, }) - assert.NoError(t, err) + s.NoError(err) err = 
merr.Error(createCollectionStatus) if err != nil { @@ -72,8 +68,8 @@ func TestRangeSearchIP(t *testing.T) { log.Info("CreateCollection result", zap.Any("createCollectionStatus", createCollectionStatus)) showCollectionsResp, err := c.proxy.ShowCollections(ctx, &milvuspb.ShowCollectionsRequest{}) - assert.NoError(t, err) - assert.True(t, merr.Ok(showCollectionsResp.GetStatus())) + s.NoError(err) + s.True(merr.Ok(showCollectionsResp.GetStatus())) log.Info("ShowCollections result", zap.Any("showCollectionsResp", showCollectionsResp)) fVecColumn := newFloatVectorFieldData(floatVecField, rowNum, dim) @@ -85,23 +81,23 @@ func TestRangeSearchIP(t *testing.T) { HashKeys: hashKeys, NumRows: uint32(rowNum), }) - assert.NoError(t, err) - assert.True(t, merr.Ok(insertResult.GetStatus())) + s.NoError(err) + s.True(merr.Ok(insertResult.GetStatus())) // flush flushResp, err := c.proxy.Flush(ctx, &milvuspb.FlushRequest{ DbName: dbName, CollectionNames: []string{collectionName}, }) - assert.NoError(t, err) + s.NoError(err) segmentIDs, has := flushResp.GetCollSegIDs()[collectionName] - assert.True(t, has) + s.True(has) ids := segmentIDs.GetData() - assert.NotEmpty(t, segmentIDs) + s.NotEmpty(segmentIDs) segments, err := c.metaWatcher.ShowSegments() - assert.NoError(t, err) - assert.NotEmpty(t, segments) + s.NoError(err) + s.NotEmpty(segments) for _, segment := range segments { log.Info("ShowSegments result", zap.String("segment", segment.String())) } @@ -114,19 +110,19 @@ func TestRangeSearchIP(t *testing.T) { IndexName: "_default", ExtraParams: constructIndexParam(dim, IndexFaissIvfFlat, distance.IP), }) - assert.NoError(t, err) + s.NoError(err) err = merr.Error(createIndexStatus) if err != nil { log.Warn("createIndexStatus fail reason", zap.Error(err)) } - waitingForIndexBuilt(ctx, c, t, collectionName, floatVecField) + waitingForIndexBuilt(ctx, c, s.T(), collectionName, floatVecField) // load loadStatus, err := c.proxy.LoadCollection(ctx, &milvuspb.LoadCollectionRequest{ DbName: dbName, CollectionName: collectionName, }) - assert.NoError(t, err) + s.NoError(err) err = merr.Error(loadStatus) if err != nil { log.Warn("LoadCollection fail reason", zap.Error(err)) @@ -153,7 +149,7 @@ func TestRangeSearchIP(t *testing.T) { if err != nil { log.Warn("searchResult fail reason", zap.Error(err)) } - assert.NoError(t, err) + s.NoError(err) // pass in radius and range_filter when range search params["range_filter"] = filter @@ -166,7 +162,7 @@ func TestRangeSearchIP(t *testing.T) { if err != nil { log.Warn("searchResult fail reason", zap.Error(err)) } - assert.NoError(t, err) + s.NoError(err) // pass in illegal radius and range_filter when range search params["radius"] = filter @@ -180,26 +176,20 @@ func TestRangeSearchIP(t *testing.T) { if err != nil { log.Warn("searchResult fail reason", zap.Error(err)) } - assert.Error(t, err) + s.Error(err) log.Info("=========================") log.Info("=========================") log.Info("TestRangeSearchIP succeed") log.Info("=========================") log.Info("=========================") + } -func TestRangeSearchL2(t *testing.T) { - ctx, cancel := context.WithTimeout(context.Background(), time.Second*180) - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - defer func() { - err = c.Stop() - assert.NoError(t, err) - cancel() - }() +func (s *RangeSearchSuite) TestRangeSearchL2() { + c := s.Cluster + ctx, cancel := context.WithCancel(c.GetContext()) + defer cancel() prefix := "TestRangeSearchL2" dbName := "" @@ -209,7 +199,7 
@@ func TestRangeSearchL2(t *testing.T) { schema := constructSchema(collectionName, dim, true) marshaledSchema, err := proto.Marshal(schema) - assert.NoError(t, err) + s.NoError(err) createCollectionStatus, err := c.proxy.CreateCollection(ctx, &milvuspb.CreateCollectionRequest{ DbName: dbName, @@ -217,7 +207,7 @@ func TestRangeSearchL2(t *testing.T) { Schema: marshaledSchema, ShardsNum: common.DefaultShardsNum, }) - assert.NoError(t, err) + s.NoError(err) err = merr.Error(createCollectionStatus) if err != nil { @@ -226,8 +216,8 @@ func TestRangeSearchL2(t *testing.T) { log.Info("CreateCollection result", zap.Any("createCollectionStatus", createCollectionStatus)) showCollectionsResp, err := c.proxy.ShowCollections(ctx, &milvuspb.ShowCollectionsRequest{}) - assert.NoError(t, err) - assert.True(t, merr.Ok(showCollectionsResp.GetStatus())) + s.NoError(err) + s.True(merr.Ok(showCollectionsResp.GetStatus())) log.Info("ShowCollections result", zap.Any("showCollectionsResp", showCollectionsResp)) fVecColumn := newFloatVectorFieldData(floatVecField, rowNum, dim) @@ -239,23 +229,23 @@ func TestRangeSearchL2(t *testing.T) { HashKeys: hashKeys, NumRows: uint32(rowNum), }) - assert.NoError(t, err) - assert.True(t, merr.Ok(insertResult.GetStatus())) + s.NoError(err) + s.True(merr.Ok(insertResult.GetStatus())) // flush flushResp, err := c.proxy.Flush(ctx, &milvuspb.FlushRequest{ DbName: dbName, CollectionNames: []string{collectionName}, }) - assert.NoError(t, err) + s.NoError(err) segmentIDs, has := flushResp.GetCollSegIDs()[collectionName] - assert.True(t, has) + s.True(has) ids := segmentIDs.GetData() - assert.NotEmpty(t, segmentIDs) + s.NotEmpty(segmentIDs) segments, err := c.metaWatcher.ShowSegments() - assert.NoError(t, err) - assert.NotEmpty(t, segments) + s.NoError(err) + s.NotEmpty(segments) for _, segment := range segments { log.Info("ShowSegments result", zap.String("segment", segment.String())) } @@ -268,19 +258,19 @@ func TestRangeSearchL2(t *testing.T) { IndexName: "_default", ExtraParams: constructIndexParam(dim, IndexFaissIvfFlat, distance.L2), }) - assert.NoError(t, err) + s.NoError(err) err = merr.Error(createIndexStatus) if err != nil { log.Warn("createIndexStatus fail reason", zap.Error(err)) } - waitingForIndexBuilt(ctx, c, t, collectionName, floatVecField) + waitingForIndexBuilt(ctx, c, s.T(), collectionName, floatVecField) // load loadStatus, err := c.proxy.LoadCollection(ctx, &milvuspb.LoadCollectionRequest{ DbName: dbName, CollectionName: collectionName, }) - assert.NoError(t, err) + s.NoError(err) err = merr.Error(loadStatus) if err != nil { log.Warn("LoadCollection fail reason", zap.Error(err)) @@ -306,7 +296,7 @@ func TestRangeSearchL2(t *testing.T) { if err != nil { log.Warn("searchResult fail reason", zap.Error(err)) } - assert.NoError(t, err) + s.NoError(err) // pass in radius and range_filter when range search params["range_filter"] = filter @@ -319,7 +309,7 @@ func TestRangeSearchL2(t *testing.T) { if err != nil { log.Warn("searchResult fail reason", zap.Error(err)) } - assert.NoError(t, err) + s.NoError(err) // pass in illegal radius and range_filter when range search params["radius"] = filter @@ -333,11 +323,16 @@ func TestRangeSearchL2(t *testing.T) { if err != nil { log.Warn("searchResult fail reason", zap.Error(err)) } - assert.Error(t, err) + s.Error(err) log.Info("=========================") log.Info("=========================") log.Info("TestRangeSearchL2 succeed") log.Info("=========================") log.Info("=========================") + +} + +func 
TestRangeSearch(t *testing.T) { + suite.Run(t, new(RangeSearchSuite)) } diff --git a/tests/integration/refresh_config_test.go b/tests/integration/refresh_config_test.go index cfd5ce71b7..03d6135caf 100644 --- a/tests/integration/refresh_config_test.go +++ b/tests/integration/refresh_config_test.go @@ -29,62 +29,50 @@ import ( "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/distance" "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" "go.uber.org/zap" ) -func TestRefreshPasswordLength(t *testing.T) { - ctx, cancel := context.WithTimeout(context.Background(), time.Second*180) +type RefreshConfigSuite struct { + MiniClusterSuite +} + +func (s *RefreshConfigSuite) TestRefreshPasswordLength() { + c := s.Cluster + ctx, cancel := context.WithCancel(c.GetContext()) defer cancel() - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - defer func() { - err = c.Stop() - assert.NoError(t, err) - cancel() - }() - - s, err := c.proxy.CreateCredential(ctx, &milvuspb.CreateCredentialRequest{ + resp, err := c.proxy.CreateCredential(ctx, &milvuspb.CreateCredentialRequest{ Username: "test", Password: "1234", }) - log.Debug("first create result", zap.Any("state", s)) - assert.Equal(t, commonpb.ErrorCode_IllegalArgument, s.GetErrorCode()) + log.Debug("first create result", zap.Any("state", resp)) + s.Require().NoError(err) + s.Equal(commonpb.ErrorCode_IllegalArgument, resp.GetErrorCode()) params := paramtable.Get() - c.etcdCli.KV.Put(ctx, fmt.Sprintf("%s/config/proxy/minpasswordlength", params.EtcdCfg.RootPath.GetValue()), "3") + key := fmt.Sprintf("%s/config/proxy/minpasswordlength", params.EtcdCfg.RootPath.GetValue()) + c.etcdCli.KV.Put(ctx, key, "3") - assert.Eventually(t, func() bool { - s, err = c.proxy.CreateCredential(ctx, &milvuspb.CreateCredentialRequest{ + s.Eventually(func() bool { + resp, err = c.proxy.CreateCredential(ctx, &milvuspb.CreateCredentialRequest{ Username: "test", Password: "1234", }) - log.Debug("second create result", zap.Any("state", s)) - return commonpb.ErrorCode_Success == s.GetErrorCode() + log.Debug("second create result", zap.Any("state", resp)) + return commonpb.ErrorCode_Success == resp.GetErrorCode() }, time.Second*20, time.Millisecond*500) + } -func TestRefreshDefaultIndexName(t *testing.T) { - ctx, cancel := context.WithTimeout(context.Background(), time.Second*180) +func (s *RefreshConfigSuite) TestRefreshDefaultIndexName() { + c := s.Cluster + ctx, cancel := context.WithCancel(c.GetContext()) defer cancel() - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - - err = c.Start() - assert.NoError(t, err) - defer func() { - err = c.Stop() - assert.NoError(t, err) - cancel() - }() - params := paramtable.Get() c.etcdCli.KV.Put(ctx, fmt.Sprintf("%s/config/common/defaultIndexName", params.EtcdCfg.RootPath.GetValue()), "a_index") - assert.Eventually(t, func() bool { + s.Eventually(func() bool { return params.CommonCfg.DefaultIndexName.GetValue() == "a_index" }, time.Second*10, time.Millisecond*500) @@ -95,6 +83,7 @@ func TestRefreshDefaultIndexName(t *testing.T) { schema := constructSchema("test", 128, true) marshaledSchema, err := proto.Marshal(schema) + s.Require().NoError(err) createCollectionStatus, err := c.proxy.CreateCollection(ctx, &milvuspb.CreateCollectionRequest{ DbName: "default", @@ -102,11 +91,11 @@ func TestRefreshDefaultIndexName(t *testing.T) { Schema: marshaledSchema, ShardsNum: 1, }) - assert.NoError(t, 
err) + s.NoError(err) if createCollectionStatus.GetErrorCode() != commonpb.ErrorCode_Success { log.Warn("createCollectionStatus fail reason", zap.String("reason", createCollectionStatus.GetReason())) } - assert.Equal(t, createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) + s.Equal(createCollectionStatus.GetErrorCode(), commonpb.ErrorCode_Success) fVecColumn := newFloatVectorFieldData(floatVecField, rowNum, dim) hashKeys := generateHashKeys(rowNum) @@ -117,19 +106,25 @@ func TestRefreshDefaultIndexName(t *testing.T) { HashKeys: hashKeys, NumRows: uint32(rowNum), }) - assert.NoError(t, err) + s.NoError(err) _, err = c.proxy.CreateIndex(ctx, &milvuspb.CreateIndexRequest{ CollectionName: collectionName, FieldName: floatVecField, ExtraParams: constructIndexParam(dim, IndexFaissIvfFlat, distance.L2), }) + s.NoError(err) - s, err := c.proxy.DescribeIndex(ctx, &milvuspb.DescribeIndexRequest{ + resp, err := c.proxy.DescribeIndex(ctx, &milvuspb.DescribeIndexRequest{ DbName: dbName, CollectionName: collectionName, }) - assert.Equal(t, commonpb.ErrorCode_Success, s.Status.GetErrorCode()) - assert.Equal(t, 1, len(s.IndexDescriptions)) - assert.Equal(t, "a_index_101", s.IndexDescriptions[0].GetIndexName()) + s.NoError(err) + s.Equal(commonpb.ErrorCode_Success, resp.Status.GetErrorCode()) + s.Equal(1, len(resp.IndexDescriptions)) + s.Equal("a_index_101", resp.IndexDescriptions[0].GetIndexName()) +} + +func TestRefreshConfig(t *testing.T) { + suite.Run(t, new(RefreshConfigSuite)) } diff --git a/tests/integration/suite_test.go b/tests/integration/suite_test.go new file mode 100644 index 0000000000..8415b4a85d --- /dev/null +++ b/tests/integration/suite_test.go @@ -0,0 +1,103 @@ +// Licensed to the LF AI & Data foundation under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package integration + +import ( + "context" + "os" + "strings" + "time" + + "github.com/milvus-io/milvus/pkg/util/etcd" + "github.com/milvus-io/milvus/pkg/util/paramtable" + "github.com/stretchr/testify/suite" + "go.etcd.io/etcd/server/v3/embed" +) + +// EmbedEtcdSuite contains embed setup & teardown related logic +type EmbedEtcdSuite struct { + EtcdServer *embed.Etcd + EtcdDir string +} + +func (s *EmbedEtcdSuite) SetupEmbedEtcd() error { + server, folder, err := etcd.StartTestEmbedEtcdServer() + if err != nil { + return err + } + + s.EtcdServer = server + s.EtcdDir = folder + + return nil +} + +func (s *EmbedEtcdSuite) TearDownEmbedEtcd() { + if s.EtcdServer != nil { + s.EtcdServer.Server.Stop() + } + if s.EtcdDir != "" { + os.RemoveAll(s.EtcdDir) + } +} + +type MiniClusterSuite struct { + suite.Suite + EmbedEtcdSuite + + Cluster *MiniCluster + cancelFunc context.CancelFunc +} + +func (s *MiniClusterSuite) SetupSuite() { + s.Require().NoError(s.SetupEmbedEtcd()) +} + +func (s *MiniClusterSuite) TearDownSuite() { + s.TearDownEmbedEtcd() +} + +func (s *MiniClusterSuite) SetupTest() { + s.T().Log("Setup test...") + // setup mini cluster to use embed etcd + endpoints := etcd.GetEmbedEtcdEndpoints(s.EtcdServer) + val := strings.Join(endpoints, ",") + // setup env value to init etcd source + s.T().Setenv("etcd.endpoints", val) + + params = paramtable.Get() + + ctx, cancel := context.WithTimeout(context.Background(), time.Second*180) + s.cancelFunc = cancel + c, err := StartMiniCluster(ctx, func(c *MiniCluster) { + // change config etcd endpoints + c.params[params.EtcdCfg.Endpoints.Key] = val + }) + s.Require().NoError(err) + s.Cluster = c + + // start mini cluster + s.Require().NoError(s.Cluster.Start()) +} + +func (s *MiniClusterSuite) TearDownTest() { + s.T().Log("Tear Down test...") + defer s.cancelFunc() + if s.Cluster != nil { + s.Cluster.Stop() + } +} diff --git a/tests/integration/upsert_test.go b/tests/integration/upsert_test.go index 4bdf9f3024..ef13821829 100644 --- a/tests/integration/upsert_test.go +++ b/tests/integration/upsert_test.go @@ -20,7 +20,6 @@ import ( "context" "fmt" "testing" - "time" "github.com/golang/protobuf/proto" "github.com/milvus-io/milvus-proto/go-api/milvuspb" @@ -30,22 +29,18 @@ import ( "github.com/milvus-io/milvus/pkg/util/distance" "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/merr" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" "go.uber.org/zap" ) -func TestUpsert(t *testing.T) { - ctx, cancel := context.WithTimeout(context.Background(), time.Second*180) - c, err := StartMiniCluster(ctx) - assert.NoError(t, err) - err = c.Start() - assert.NoError(t, err) - defer func() { - err = c.Stop() - assert.NoError(t, err) - cancel() - }() - assert.NoError(t, err) +type UpsertSuite struct { + MiniClusterSuite +} + +func (s *UpsertSuite) TestUpsert() { + c := s.Cluster + ctx, cancel := context.WithCancel(c.GetContext()) + defer cancel() prefix := "TestUpsert" dbName := "" @@ -55,7 +50,7 @@ func TestUpsert(t *testing.T) { schema := constructSchema(collectionName, dim, false) marshaledSchema, err := proto.Marshal(schema) - assert.NoError(t, err) + s.NoError(err) createCollectionStatus, err := c.proxy.CreateCollection(ctx, &milvuspb.CreateCollectionRequest{ DbName: dbName, @@ -63,7 +58,7 @@ func TestUpsert(t *testing.T) { Schema: marshaledSchema, ShardsNum: common.DefaultShardsNum, }) - assert.NoError(t, err) + s.NoError(err) err = merr.Error(createCollectionStatus) if err != nil { @@ -72,8 +67,8 
@@ func TestUpsert(t *testing.T) { log.Info("CreateCollection result", zap.Any("createCollectionStatus", createCollectionStatus)) showCollectionsResp, err := c.proxy.ShowCollections(ctx, &milvuspb.ShowCollectionsRequest{}) - assert.NoError(t, err) - assert.True(t, merr.Ok(showCollectionsResp.GetStatus())) + s.NoError(err) + s.True(merr.Ok(showCollectionsResp.GetStatus())) log.Info("ShowCollections result", zap.Any("showCollectionsResp", showCollectionsResp)) pkFieldData := newInt64FieldData(int64Field, rowNum) @@ -86,23 +81,23 @@ func TestUpsert(t *testing.T) { HashKeys: hashKeys, NumRows: uint32(rowNum), }) - assert.NoError(t, err) - assert.True(t, merr.Ok(upsertResult.GetStatus())) + s.NoError(err) + s.True(merr.Ok(upsertResult.GetStatus())) // flush flushResp, err := c.proxy.Flush(ctx, &milvuspb.FlushRequest{ DbName: dbName, CollectionNames: []string{collectionName}, }) - assert.NoError(t, err) + s.NoError(err) segmentIDs, has := flushResp.GetCollSegIDs()[collectionName] - assert.True(t, has) + s.True(has) ids := segmentIDs.GetData() - assert.NotEmpty(t, segmentIDs) + s.NotEmpty(segmentIDs) segments, err := c.metaWatcher.ShowSegments() - assert.NoError(t, err) - assert.NotEmpty(t, segments) + s.NoError(err) + s.NotEmpty(segments) for _, segment := range segments { log.Info("ShowSegments result", zap.String("segment", segment.String())) } @@ -115,20 +110,20 @@ func TestUpsert(t *testing.T) { IndexName: "_default", ExtraParams: constructIndexParam(dim, IndexFaissIvfFlat, distance.IP), }) - assert.NoError(t, err) + s.NoError(err) err = merr.Error(createIndexStatus) if err != nil { log.Warn("createIndexStatus fail reason", zap.Error(err)) } - waitingForIndexBuilt(ctx, c, t, collectionName, floatVecField) + waitingForIndexBuilt(ctx, c, s.T(), collectionName, floatVecField) // load loadStatus, err := c.proxy.LoadCollection(ctx, &milvuspb.LoadCollectionRequest{ DbName: dbName, CollectionName: collectionName, }) - assert.NoError(t, err) + s.NoError(err) err = merr.Error(loadStatus) if err != nil { log.Warn("LoadCollection fail reason", zap.Error(err)) @@ -150,11 +145,16 @@ func TestUpsert(t *testing.T) { if err != nil { log.Warn("searchResult fail reason", zap.Error(err)) } - assert.NoError(t, err) + s.NoError(err) log.Info("==================") log.Info("==================") log.Info("TestUpsert succeed") log.Info("==================") log.Info("==================") + +} + +func TestUpsert(t *testing.T) { + suite.Run(t, new(UpsertSuite)) }
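For reference, a minimal sketch of how a new integration test can build on the MiniClusterSuite introduced above (the suite name DemoSuite, the test method, and its body are illustrative placeholders, not part of this patch; MiniClusterSuite, the Cluster field, GetContext and suite.Run are the pieces this patch provides):

    package integration

    import (
    	"context"
    	"testing"

    	"github.com/stretchr/testify/suite"
    )

    // DemoSuite is a hypothetical suite; embedding MiniClusterSuite gives it the
    // per-test mini cluster plus the shared embedded etcd set up in suite_test.go.
    type DemoSuite struct {
    	MiniClusterSuite
    }

    func (s *DemoSuite) TestSomething() {
    	// Each test derives its context from the running cluster instead of
    	// calling StartMiniCluster/Stop itself, mirroring the converted tests.
    	c := s.Cluster
    	ctx, cancel := context.WithCancel(c.GetContext())
    	defer cancel()
    	_ = ctx // issue requests through the cluster with ctx, asserting via s.NoError / s.Equal
    }

    func TestDemo(t *testing.T) {
    	suite.Run(t, new(DemoSuite))
    }

With this pattern, SetupTest/TearDownTest own cluster start and stop, so individual tests no longer need the defer c.Stop() / cancel() boilerplate removed throughout this patch.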