Mirror of https://gitee.com/milvus-io/milvus.git (synced 2025-12-07 01:28:27 +08:00)
Remove mysql metastore (#26633)

Signed-off-by: jaime <yun.zhang@zilliz.com>

parent 64a9762cf3
commit c603f1c244
@@ -103,18 +103,14 @@ pipeline {
         script {
             sh 'printenv'
             def clusterEnabled = "false"
-            def mysqlEnabled = "false"
             // def setMemoryResourceLimitArgs="--set standalone.resources.limits.memory=4Gi"
             def mqMode='pulsar' // default using is pulsar
-            def mysql_architecture = "standalone"
             def authenticationEnabled = "false"
             if ("${MILVUS_SERVER_TYPE}" == "distributed-pulsar") {
                 clusterEnabled = "true"
             } else if ("${MILVUS_SERVER_TYPE}" == "distributed-kafka") {
                 clusterEnabled = "true"
-                // mysqlEnabled = "true"
                 mqMode='kafka'
-                // mysql_architecture = "replication"
             } else if("${MILVUS_SERVER_TYPE}" == "standalone-authentication") {
                 authenticationEnabled = "true"
             }
@@ -148,8 +144,6 @@ pipeline {
                 --set queryNode.replicas=2 \
                 --set indexNode.replicas=2 \
                 --set dataNode.replicas=2 \
-                --set mysql.enabled=${mysqlEnabled} \
-                --set mysql.architecture=${mysql_architecture} \
                 --set dataCoordinator.gc.missingTolerance=86400 \
                 --set dataCoordinator.gc.dropTolerance=86400 \
                 --set indexCoordinator.gc.interval=1 \
@@ -167,7 +161,6 @@ pipeline {
                 --set common.security.authorizationEnabled=${authenticationEnabled} \
                 --version ${chart_version} \
                 -f values/${mqMode}.yaml \
-                -f values/mysql.yaml \
                 -f values/ci/nightly.yaml "
                 """
             }
@@ -206,10 +206,6 @@ func WriteYaml() {
         {
             name: "metastore",
         },
-        {
-            name:   "mysql",
-            header: "\n# Related configuration of mysql, used to store Milvus metadata.",
-        },
         {
             name: "localStorage",
         },
@@ -66,7 +66,6 @@ func (c *RunConfig) init(base *paramtable.BaseTable) {
 type MilvusConfig struct {
     MetaStoreCfg *paramtable.MetaStoreConfig
     EtcdCfg      *paramtable.EtcdConfig
-    MysqlCfg     *paramtable.MetaDBConfig
 }

 func newMilvusConfig(base *paramtable.BaseTable) *MilvusConfig {
@@ -78,16 +77,8 @@ func newMilvusConfig(base *paramtable.BaseTable) *MilvusConfig {
 func (c *MilvusConfig) init(base *paramtable.BaseTable) {
     c.MetaStoreCfg = &paramtable.MetaStoreConfig{}
     c.EtcdCfg = &paramtable.EtcdConfig{}
-    c.MysqlCfg = &paramtable.MetaDBConfig{}

     c.MetaStoreCfg.Init(base)
-
-    switch c.MetaStoreCfg.MetaStoreType.GetValue() {
-    case util.MetaStoreTypeMysql:
-        c.MysqlCfg.Init(base)
-    default:
-    }
-
     c.EtcdCfg.Init(base)
 }

@@ -183,7 +183,7 @@ func combineToCollectionIndexesMeta220(fieldIndexes FieldIndexes210, collectionI
             newIndexName = "_default_idx_" + strconv.FormatInt(index.GetFiledID(), 10)
         }
         record := &model.Index{
-            TenantID:     "", // TODO: how to set this if we support mysql later?
+            TenantID:     "",
             CollectionID: collectionID,
             FieldID:      index.GetFiledID(),
             IndexID:      index.GetIndexID(),
@@ -45,19 +45,9 @@ etcd:

 metastore:
   # Default value: etcd
-  # Valid values: [etcd, mysql]
+  # Valid values: etcd
   type: etcd

-# Related configuration of mysql, used to store Milvus metadata.
-mysql:
-  username: root
-  password: 123456
-  address: localhost
-  port: 3306
-  dbName: milvus_meta
-  maxOpenConns: 20
-  maxIdleConns: 5
-
 localStorage:
   path: /var/lib/milvus/data/ # please adjust in embedded Milvus: /tmp/milvus/data/

go.mod (2 changes)
@@ -53,7 +53,6 @@ require (
     golang.org/x/text v0.9.0
     google.golang.org/grpc v1.54.0
     google.golang.org/grpc/examples v0.0.0-20220617181431-3e7b97febc7f
-    gorm.io/driver/mysql v1.3.5
     gorm.io/gorm v1.23.8
     stathat.com/c/consistent v1.0.0
 )
@@ -105,7 +104,6 @@ require (
     github.com/go-playground/locales v0.14.1 // indirect
     github.com/go-playground/universal-translator v0.18.1 // indirect
     github.com/go-playground/validator/v10 v10.14.0 // indirect
-    github.com/go-sql-driver/mysql v1.6.0 // indirect
     github.com/goccy/go-json v0.10.2 // indirect
     github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect
     github.com/godbus/dbus/v5 v5.0.4 // indirect
go.sum (4 changes)
@@ -288,8 +288,6 @@ github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91
 github.com/go-playground/validator/v10 v10.14.0 h1:vgvQWe3XCz3gIeFDm/HnTIbj6UGmg/+t63MyGU2n5js=
 github.com/go-playground/validator/v10 v10.14.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
 github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
-github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
-github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
 github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
 github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
 github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo=
@@ -1449,8 +1447,6 @@ gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C
 gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-gorm.io/driver/mysql v1.3.5 h1:iWBTVW/8Ij5AG4e0G/zqzaJblYkBI1VIL1LG2HUGsvY=
-gorm.io/driver/mysql v1.3.5/go.mod h1:sSIebwZAVPiT+27jK9HIwvsqOGKx3YMPmrA3mBJR10c=
 gorm.io/gorm v1.23.8 h1:h8sGJ+biDgBA1AD1Ha9gFCx7h8npU7AsLdlkX0n2TpE=
 gorm.io/gorm v1.23.8/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk=
 honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
@@ -1,119 +0,0 @@ (file deleted)
package dao

import (
    "fmt"

    "github.com/cockroachdb/errors"
    "go.uber.org/zap"
    "gorm.io/gorm"
    "gorm.io/gorm/clause"

    "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    "github.com/milvus-io/milvus/pkg/log"
    "github.com/milvus-io/milvus/pkg/util/typeutil"
)

type collectionDb struct {
    db *gorm.DB
}

func (s *collectionDb) GetCollectionIDTs(tenantID string, collectionID typeutil.UniqueID, ts typeutil.Timestamp) (*dbmodel.Collection, error) {
    var col dbmodel.Collection

    err := s.db.Model(&dbmodel.Collection{}).Select("collection_id, ts").Where("tenant_id = ? AND collection_id = ? AND ts <= ?", tenantID, collectionID, ts).Order("ts desc").Take(&col).Error

    if errors.Is(err, gorm.ErrRecordNotFound) {
        log.Warn("record not found", zap.Int64("collectionID", collectionID), zap.Uint64("ts", ts), zap.Error(err))
        return nil, fmt.Errorf("record not found, collID=%d, ts=%d", collectionID, ts)
    }
    if err != nil {
        log.Error("get collection ts failed", zap.String("tenant", tenantID), zap.Int64("collectionID", collectionID), zap.Uint64("ts", ts), zap.Error(err))
        return nil, err
    }

    return &col, nil
}

func (s *collectionDb) ListCollectionIDTs(tenantID string, ts typeutil.Timestamp) ([]*dbmodel.Collection, error) {
    var r []*dbmodel.Collection

    err := s.db.Model(&dbmodel.Collection{}).Select("collection_id, MAX(ts) ts").Where("tenant_id = ? AND ts <= ?", tenantID, ts).Group("collection_id").Find(&r).Error
    if err != nil {
        log.Error("list collection_id & latest ts pairs in collections failed", zap.String("tenant", tenantID), zap.Uint64("ts", ts), zap.Error(err))
        return nil, err
    }

    return r, nil
}

func (s *collectionDb) Get(tenantID string, collectionID typeutil.UniqueID, ts typeutil.Timestamp) (*dbmodel.Collection, error) {
    var r dbmodel.Collection

    err := s.db.Model(&dbmodel.Collection{}).Where("tenant_id = ? AND collection_id = ? AND ts = ? AND is_deleted = false", tenantID, collectionID, ts).Take(&r).Error

    if errors.Is(err, gorm.ErrRecordNotFound) {
        return nil, fmt.Errorf("collection not found, collID=%d, ts=%d", collectionID, ts)
    }
    if err != nil {
        log.Error("get collection by collection_id and ts failed", zap.String("tenant", tenantID), zap.Int64("collectionID", collectionID), zap.Uint64("ts", ts), zap.Error(err))
        return nil, err
    }

    return &r, nil
}

func (s *collectionDb) GetCollectionIDByName(tenantID string, collectionName string, ts typeutil.Timestamp) (typeutil.UniqueID, error) {
    var r dbmodel.Collection

    err := s.db.Model(&dbmodel.Collection{}).Select("collection_id").Where("tenant_id = ? AND collection_name = ? AND ts <= ?", tenantID, collectionName, ts).Order("ts desc").Take(&r).Error

    if errors.Is(err, gorm.ErrRecordNotFound) {
        return 0, fmt.Errorf("get collection_id by collection_name not found, collName=%s, ts=%d", collectionName, ts)
    }
    if err != nil {
        log.Error("get collection_id by collection_name failed", zap.String("tenant", tenantID), zap.String("collName", collectionName), zap.Uint64("ts", ts), zap.Error(err))
        return 0, err
    }

    return r.CollectionID, nil
}

// Insert used in create & drop collection, needs be an idempotent operation, so we use DoNothing strategy here so it will not throw exception for retry, equivalent to kv catalog
func (s *collectionDb) Insert(in *dbmodel.Collection) error {
    err := s.db.Clauses(clause.OnConflict{
        // constraint UNIQUE (tenant_id, collection_id, ts)
        DoNothing: true,
    }).Create(&in).Error

    if err != nil {
        log.Error("insert collection failed", zap.String("tenant", in.TenantID), zap.Int64("collectionID", in.CollectionID), zap.Uint64("ts", in.Ts), zap.Error(err))
        return err
    }

    return nil
}

func generateCollectionUpdatesWithoutID(in *dbmodel.Collection) map[string]interface{} {
    ret := map[string]interface{}{
        "tenant_id":         in.TenantID,
        "collection_id":     in.CollectionID,
        "collection_name":   in.CollectionName,
        "description":       in.Description,
        "auto_id":           in.AutoID,
        "shards_num":        in.ShardsNum,
        "start_position":    in.StartPosition,
        "consistency_level": in.ConsistencyLevel,
        "status":            in.Status,
        "properties":        in.Properties,
        "ts":                in.Ts,
        "is_deleted":        in.IsDeleted,
        "created_at":        in.CreatedAt,
        "updated_at":        in.UpdatedAt,
    }
    return ret
}

func (s *collectionDb) Update(in *dbmodel.Collection) error {
    updates := generateCollectionUpdatesWithoutID(in)
    return s.db.Model(&dbmodel.Collection{}).Where("id = ?", in.ID).Updates(updates).Error
}
@@ -1,79 +0,0 @@ (file deleted)
package dao

import (
    "fmt"

    "github.com/cockroachdb/errors"
    "go.uber.org/zap"
    "gorm.io/gorm"
    "gorm.io/gorm/clause"

    "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    "github.com/milvus-io/milvus/pkg/log"
    "github.com/milvus-io/milvus/pkg/util/typeutil"
)

type collAliasDb struct {
    db *gorm.DB
}

func (s *collAliasDb) Insert(in []*dbmodel.CollectionAlias) error {
    err := s.db.Clauses(clause.OnConflict{
        // constraint UNIQUE (tenant_id, collection_alias, ts)
        DoNothing: true,
    }).Create(&in).Error

    if err != nil {
        log.Error("insert collection alias failed", zap.Error(err))
        return err
    }

    return nil
}

func (s *collAliasDb) GetCollectionIDByAlias(tenantID string, alias string, ts typeutil.Timestamp) (typeutil.UniqueID, error) {
    var r dbmodel.CollectionAlias

    err := s.db.Model(&dbmodel.CollectionAlias{}).Select("collection_id").Where("tenant_id = ? AND collection_alias = ? AND ts <= ?", tenantID, alias, ts).Order("ts desc").Take(&r).Error

    if errors.Is(err, gorm.ErrRecordNotFound) {
        return 0, fmt.Errorf("get collection_id by alias not found, alias=%s, ts=%d", alias, ts)
    }
    if err != nil {
        log.Error("get collection_id by alias failed", zap.String("tenant", tenantID), zap.String("alias", alias), zap.Uint64("ts", ts), zap.Error(err))
        return 0, err
    }

    return r.CollectionID, nil
}

func (s *collAliasDb) ListCollectionIDTs(tenantID string, ts typeutil.Timestamp) ([]*dbmodel.CollectionAlias, error) {
    var r []*dbmodel.CollectionAlias

    err := s.db.Model(&dbmodel.CollectionAlias{}).Select("collection_id, MAX(ts) ts").Where("tenant_id = ? AND ts <= ?", tenantID, ts).Group("collection_id").Find(&r).Error
    if err != nil {
        log.Error("list collection_id & latest ts pairs in collection_aliases failed", zap.String("tenant", tenantID), zap.Uint64("ts", ts), zap.Error(err))
        return nil, err
    }

    return r, nil
}

func (s *collAliasDb) List(tenantID string, cidTsPairs []*dbmodel.CollectionAlias) ([]*dbmodel.CollectionAlias, error) {
    var collAliases []*dbmodel.CollectionAlias

    inValues := make([][]interface{}, 0, len(cidTsPairs))
    for _, pair := range cidTsPairs {
        in := []interface{}{pair.CollectionID, pair.Ts}
        inValues = append(inValues, in)
    }

    err := s.db.Model(&dbmodel.CollectionAlias{}).Select("collection_id, collection_alias").
        Where("tenant_id = ? AND is_deleted = false AND (collection_id, ts) IN ?", tenantID, inValues).Find(&collAliases).Error
    if err != nil {
        log.Error("list alias by collection_id and alias pairs failed", zap.String("tenant", tenantID), zap.Any("collIdTs", inValues), zap.Error(err))
        return nil, err
    }

    return collAliases, nil
}
@@ -1,205 +0,0 @@ (file deleted)
package dao

import (
    "testing"
    "time"

    "github.com/cockroachdb/errors"

    "github.com/DATA-DOG/go-sqlmock"
    "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    "github.com/milvus-io/milvus/pkg/util/typeutil"
    "github.com/stretchr/testify/assert"
    "gorm.io/gorm"
)

func TestCollectionAlias_Insert(t *testing.T) {
    var collAliases = []*dbmodel.CollectionAlias{
        {
            TenantID:        "",
            CollectionID:    collID1,
            CollectionAlias: "test_alias_1",
            Ts:              ts,
            IsDeleted:       false,
            CreatedAt:       time.Now(),
            UpdatedAt:       time.Now(),
        },
    }

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("INSERT INTO `collection_aliases` (`tenant_id`,`collection_id`,`collection_alias`,`ts`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?) ON DUPLICATE KEY UPDATE `id`=`id`").
        WithArgs(collAliases[0].TenantID, collAliases[0].CollectionID, collAliases[0].CollectionAlias, collAliases[0].Ts, collAliases[0].IsDeleted, collAliases[0].CreatedAt, collAliases[0].UpdatedAt).
        WillReturnResult(sqlmock.NewResult(100, 2))
    mock.ExpectCommit()

    // actual
    err := aliasTestDb.Insert(collAliases)
    assert.NoError(t, err)
}

func TestCollectionAlias_Insert_Error(t *testing.T) {
    var collAliases = []*dbmodel.CollectionAlias{
        {
            TenantID:        "",
            CollectionID:    collID1,
            CollectionAlias: "test_alias_1",
            Ts:              ts,
            IsDeleted:       false,
            CreatedAt:       time.Now(),
            UpdatedAt:       time.Now(),
        },
    }

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("INSERT INTO `collection_aliases` (`tenant_id`,`collection_id`,`collection_alias`,`ts`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?) ON DUPLICATE KEY UPDATE `id`=`id`").
        WithArgs(collAliases[0].TenantID, collAliases[0].CollectionID, collAliases[0].CollectionAlias, collAliases[0].Ts, collAliases[0].IsDeleted, collAliases[0].CreatedAt, collAliases[0].UpdatedAt).
        WillReturnError(errors.New("test error"))
    mock.ExpectRollback()

    // actual
    err := aliasTestDb.Insert(collAliases)
    assert.Error(t, err)
}

func TestCollectionAlias_GetCollectionIDByName(t *testing.T) {
    alias := "test_alias_name_1"

    // expectation
    mock.ExpectQuery("SELECT `collection_id` FROM `collection_aliases` WHERE tenant_id = ? AND collection_alias = ? AND ts <= ? ORDER BY ts desc LIMIT 1").
        WithArgs(tenantID, alias, ts).
        WillReturnRows(
            sqlmock.NewRows([]string{"collection_id"}).
                AddRow(collID1))

    // actual
    res, err := aliasTestDb.GetCollectionIDByAlias(tenantID, alias, ts)
    assert.NoError(t, err)
    assert.Equal(t, collID1, res)
}

func TestCollectionAlias_GetCollectionIDByName_Error(t *testing.T) {
    alias := "test_alias_name_1"

    // expectation
    mock.ExpectQuery("SELECT `collection_id` FROM `collection_aliases` WHERE tenant_id = ? AND collection_alias = ? AND ts <= ? ORDER BY ts desc LIMIT 1").
        WithArgs(tenantID, alias, ts).
        WillReturnError(errors.New("test error"))

    // actual
    res, err := aliasTestDb.GetCollectionIDByAlias(tenantID, alias, ts)
    assert.Equal(t, typeutil.UniqueID(0), res)
    assert.Error(t, err)
}

func TestCollectionAlias_GetCollectionIDByName_ErrRecordNotFound(t *testing.T) {
    alias := "test_alias_name_1"

    // expectation
    mock.ExpectQuery("SELECT `collection_id` FROM `collection_aliases` WHERE tenant_id = ? AND collection_alias = ? AND ts <= ? ORDER BY ts desc LIMIT 1").
        WithArgs(tenantID, alias, ts).
        WillReturnError(gorm.ErrRecordNotFound)

    // actual
    res, err := aliasTestDb.GetCollectionIDByAlias(tenantID, alias, ts)
    assert.Equal(t, typeutil.UniqueID(0), res)
    assert.Error(t, err)
}

func TestCollectionAlias_ListCidTs(t *testing.T) {
    var collAliases = []*dbmodel.CollectionAlias{
        {
            CollectionID: collID1,
            Ts:           typeutil.Timestamp(2),
        },
        {
            CollectionID: collID2,
            Ts:           typeutil.Timestamp(5),
        },
    }

    // expectation
    mock.ExpectQuery("SELECT collection_id, MAX(ts) ts FROM `collection_aliases` WHERE tenant_id = ? AND ts <= ? GROUP BY `collection_id`").
        WithArgs(tenantID, ts).
        WillReturnRows(
            sqlmock.NewRows([]string{"collection_id", "ts"}).
                AddRow(collID1, typeutil.Timestamp(2)).
                AddRow(collID2, typeutil.Timestamp(5)))

    // actual
    res, err := aliasTestDb.ListCollectionIDTs(tenantID, ts)
    assert.NoError(t, err)
    assert.Equal(t, collAliases, res)
}

func TestCollectionAlias_ListCidTs_Error(t *testing.T) {
    // expectation
    mock.ExpectQuery("SELECT collection_id, MAX(ts) ts FROM `collection_aliases` WHERE tenant_id = ? AND ts <= ? GROUP BY `collection_id`").
        WithArgs(tenantID, ts).
        WillReturnError(errors.New("test error"))

    // actual
    res, err := aliasTestDb.ListCollectionIDTs(tenantID, ts)
    assert.Nil(t, res)
    assert.Error(t, err)
}

func TestCollectionAlias_List(t *testing.T) {
    var cidTsPairs = []*dbmodel.CollectionAlias{
        {
            CollectionID: collID1,
            Ts:           typeutil.Timestamp(2),
        },
        {
            CollectionID: collID2,
            Ts:           typeutil.Timestamp(5),
        },
    }
    var out = []*dbmodel.CollectionAlias{
        {
            CollectionID:    collID1,
            CollectionAlias: "test_alias_1",
        },
        {
            CollectionID:    collID2,
            CollectionAlias: "test_alias_2",
        },
    }

    // expectation
    mock.ExpectQuery("SELECT collection_id, collection_alias FROM `collection_aliases` WHERE tenant_id = ? AND is_deleted = false AND (collection_id, ts) IN ((?,?),(?,?))").
        WithArgs(tenantID, cidTsPairs[0].CollectionID, cidTsPairs[0].Ts, cidTsPairs[1].CollectionID, cidTsPairs[1].Ts).
        WillReturnRows(
            sqlmock.NewRows([]string{"collection_id", "collection_alias"}).
                AddRow(collID1, "test_alias_1").
                AddRow(collID2, "test_alias_2"))

    // actual
    res, err := aliasTestDb.List(tenantID, cidTsPairs)
    assert.NoError(t, err)
    assert.Equal(t, out, res)
}

func TestCollectionAlias_List_Error(t *testing.T) {
    var cidTsPairs = []*dbmodel.CollectionAlias{
        {
            CollectionID: collID1,
            Ts:           typeutil.Timestamp(2),
        },
        {
            CollectionID: collID2,
            Ts:           typeutil.Timestamp(5),
        },
    }

    // expectation
    mock.ExpectQuery("SELECT collection_id, collection_alias FROM `collection_aliases` WHERE tenant_id = ? AND is_deleted = false AND (collection_id, ts) IN ((?,?),(?,?))").
        WithArgs(tenantID, cidTsPairs[0].CollectionID, cidTsPairs[0].Ts, cidTsPairs[1].CollectionID, cidTsPairs[1].Ts).
        WillReturnError(errors.New("test error"))

    // actual
    res, err := aliasTestDb.List(tenantID, cidTsPairs)
    assert.Nil(t, res)
    assert.Error(t, err)
}
@@ -1,36 +0,0 @@ (file deleted)
package dao

import (
    "go.uber.org/zap"
    "gorm.io/gorm"

    "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    "github.com/milvus-io/milvus/pkg/log"
    "github.com/milvus-io/milvus/pkg/util/typeutil"
)

type collChannelDb struct {
    db *gorm.DB
}

func (s *collChannelDb) GetByCollectionID(tenantID string, collectionID typeutil.UniqueID, ts typeutil.Timestamp) ([]*dbmodel.CollectionChannel, error) {
    var r []*dbmodel.CollectionChannel

    err := s.db.Model(&dbmodel.CollectionChannel{}).Where("tenant_id = ? AND collection_id = ? AND ts = ? AND is_deleted = false", tenantID, collectionID, ts).Find(&r).Error
    if err != nil {
        log.Error("get channels by collection_id and ts failed", zap.String("tenant", tenantID), zap.Int64("collectionID", collectionID), zap.Uint64("ts", ts), zap.Error(err))
        return nil, err
    }

    return r, nil
}

func (s *collChannelDb) Insert(in []*dbmodel.CollectionChannel) error {
    err := s.db.CreateInBatches(in, 100).Error
    if err != nil {
        log.Error("insert channel failed", zap.Error(err))
        return err
    }

    return nil
}
@@ -1,106 +0,0 @@ (file deleted)
package dao

import (
    "testing"
    "time"

    "github.com/cockroachdb/errors"

    "github.com/DATA-DOG/go-sqlmock"
    "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    "github.com/stretchr/testify/assert"
)

func TestCollectionChannel_GetByCollID(t *testing.T) {
    var collChannels = []*dbmodel.CollectionChannel{
        {
            TenantID:            tenantID,
            CollectionID:        collID1,
            VirtualChannelName:  "test_virtual_channel_1",
            PhysicalChannelName: "test_physical_channel_1",
            Removed:             false,
            Ts:                  ts,
            IsDeleted:           false,
            CreatedAt:           time.Now(),
            UpdatedAt:           time.Now(),
        },
    }

    // expectation
    mock.ExpectQuery("SELECT * FROM `collection_channels` WHERE tenant_id = ? AND collection_id = ? AND ts = ? AND is_deleted = false").
        WithArgs(tenantID, collID1, ts).
        WillReturnRows(
            sqlmock.NewRows([]string{"tenant_id", "collection_id", "virtual_channel_name", "physical_channel_name", "removed", "ts", "is_deleted", "created_at", "updated_at"}).
                AddRow(collChannels[0].TenantID, collChannels[0].CollectionID, collChannels[0].VirtualChannelName, collChannels[0].PhysicalChannelName, collChannels[0].Removed, collChannels[0].Ts, collChannels[0].IsDeleted, collChannels[0].CreatedAt, collChannels[0].UpdatedAt))

    // actual
    res, err := channelTestDb.GetByCollectionID(tenantID, collID1, ts)
    assert.NoError(t, err)
    assert.Equal(t, collChannels, res)
}

func TestCollectionChannel_GetByCollID_Error(t *testing.T) {
    // expectation
    mock.ExpectQuery("SELECT * FROM `collection_channels` WHERE tenant_id = ? AND collection_id = ? AND ts = ? AND is_deleted = false").
        WithArgs(tenantID, collID1, ts).
        WillReturnError(errors.New("test error"))

    // actual
    res, err := channelTestDb.GetByCollectionID(tenantID, collID1, ts)
    assert.Nil(t, res)
    assert.Error(t, err)
}

func TestCollectionChannel_Insert(t *testing.T) {
    var collChannels = []*dbmodel.CollectionChannel{
        {
            TenantID:            "",
            CollectionID:        collID1,
            VirtualChannelName:  "test_virtual_channel_1",
            PhysicalChannelName: "test_physical_channel_1",
            Removed:             false,
            Ts:                  ts,
            IsDeleted:           false,
            CreatedAt:           time.Now(),
            UpdatedAt:           time.Now(),
        },
    }

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("INSERT INTO `collection_channels` (`tenant_id`,`collection_id`,`virtual_channel_name`,`physical_channel_name`,`removed`,`ts`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?)").
        WithArgs(collChannels[0].TenantID, collChannels[0].CollectionID, collChannels[0].VirtualChannelName, collChannels[0].PhysicalChannelName, collChannels[0].Removed, collChannels[0].Ts, collChannels[0].IsDeleted, collChannels[0].CreatedAt, collChannels[0].UpdatedAt).
        WillReturnResult(sqlmock.NewResult(1, 1))
    mock.ExpectCommit()

    // actual
    err := channelTestDb.Insert(collChannels)
    assert.NoError(t, err)
}

func TestCollectionChannel_Insert_Error(t *testing.T) {
    var collChannels = []*dbmodel.CollectionChannel{
        {
            TenantID:            "",
            CollectionID:        collID1,
            VirtualChannelName:  "test_virtual_channel_1",
            PhysicalChannelName: "test_physical_channel_1",
            Removed:             false,
            Ts:                  ts,
            IsDeleted:           false,
            CreatedAt:           time.Now(),
            UpdatedAt:           time.Now(),
        },
    }

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("INSERT INTO `collection_channels` (`tenant_id`,`collection_id`,`virtual_channel_name`,`physical_channel_name`,`removed`,`ts`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?)").
        WithArgs(collChannels[0].TenantID, collChannels[0].CollectionID, collChannels[0].VirtualChannelName, collChannels[0].PhysicalChannelName, collChannels[0].Removed, collChannels[0].Ts, collChannels[0].IsDeleted, collChannels[0].CreatedAt, collChannels[0].UpdatedAt).
        WillReturnError(errors.New("test error"))
    mock.ExpectRollback()

    // actual
    err := channelTestDb.Insert(collChannels)
    assert.Error(t, err)
}
@@ -1,495 +0,0 @@ (file deleted; only the portion rendered on this page is shown)
package dao

import (
    "context"
    "database/sql"
    "database/sql/driver"
    "os"
    "testing"
    "time"

    "github.com/cockroachdb/errors"

    "github.com/milvus-io/milvus/pkg/common"

    "github.com/DATA-DOG/go-sqlmock"
    commonpb "github.com/milvus-io/milvus-proto/go-api/v2/commonpb"
    "github.com/milvus-io/milvus/internal/metastore/db/dbcore"
    "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    "github.com/milvus-io/milvus/pkg/util/typeutil"
    "github.com/stretchr/testify/assert"
    "gorm.io/driver/mysql"
    "gorm.io/gorm"
)

const (
    tenantID      = "test_tenant"
    noTs          = typeutil.Timestamp(0)
    ts            = typeutil.Timestamp(10)
    collID1       = typeutil.UniqueID(101)
    collID2       = typeutil.UniqueID(102)
    fieldID1      = typeutil.UniqueID(501)
    indexID1      = typeutil.UniqueID(1001)
    indexID2      = typeutil.UniqueID(1002)
    segmentID1    = typeutil.UniqueID(2001)
    segmentID2    = typeutil.UniqueID(2002)
    partitionID1  = typeutil.UniqueID(3001)
    indexBuildID1 = typeutil.UniqueID(5001)
    NumRows       = 1025
)

var (
    mock            sqlmock.Sqlmock
    collTestDb      dbmodel.ICollectionDb
    aliasTestDb     dbmodel.ICollAliasDb
    channelTestDb   dbmodel.ICollChannelDb
    fieldTestDb     dbmodel.IFieldDb
    partitionTestDb dbmodel.IPartitionDb
    indexTestDb     dbmodel.IIndexDb
    segIndexTestDb  dbmodel.ISegmentIndexDb
    userTestDb      dbmodel.IUserDb
    roleTestDb      dbmodel.IRoleDb
    userRoleTestDb  dbmodel.IUserRoleDb
    grantTestDb     dbmodel.IGrantDb
    grantIDTestDb   dbmodel.IGrantIDDb

    properties = []*commonpb.KeyValuePair{
        {
            Key:   common.CollectionTTLConfigKey,
            Value: "3600",
        },
    }
    propertiesStr, _ = dbmodel.MarshalProperties(properties)
)

// TestMain is the first function executed in current package, we will do some initial here
func TestMain(m *testing.M) {
    var (
        db  *sql.DB
        err error
        ctx = context.TODO()
    )

    // setting sql MUST exact match
    db, mock, err = sqlmock.New(sqlmock.QueryMatcherOption(sqlmock.QueryMatcherEqual))
    if err != nil {
        panic(err)
    }

    DB, err := gorm.Open(mysql.New(mysql.Config{
        Conn:                      db,
        SkipInitializeWithVersion: true,
    }), &gorm.Config{})
    if err != nil {
        panic(err)
    }

    // set mocked database
    dbcore.SetGlobalDB(DB)

    collTestDb = NewMetaDomain().CollectionDb(ctx)
    aliasTestDb = NewMetaDomain().CollAliasDb(ctx)
    channelTestDb = NewMetaDomain().CollChannelDb(ctx)
    fieldTestDb = NewMetaDomain().FieldDb(ctx)
    partitionTestDb = NewMetaDomain().PartitionDb(ctx)
    indexTestDb = NewMetaDomain().IndexDb(ctx)
    segIndexTestDb = NewMetaDomain().SegmentIndexDb(ctx)
    userTestDb = NewMetaDomain().UserDb(ctx)
    roleTestDb = NewMetaDomain().RoleDb(ctx)
    userRoleTestDb = NewMetaDomain().UserRoleDb(ctx)
    grantTestDb = NewMetaDomain().GrantDb(ctx)
    grantIDTestDb = NewMetaDomain().GrantIDDb(ctx)

    // m.Run entry for executing tests
    os.Exit(m.Run())
}

// Notice: sql must be exactly matched, we can use debug() to print the sql

func TestCollection_GetCidTs_Ts0(t *testing.T) {
    var collection = &dbmodel.Collection{
        CollectionID: collID1,
        Ts:           noTs,
    }

    // expectation
    mock.ExpectQuery("SELECT collection_id, ts FROM `collections` WHERE tenant_id = ? AND collection_id = ? AND ts <= ? ORDER BY ts desc LIMIT 1").
        WithArgs(tenantID, collID1, noTs).
        WillReturnRows(
            sqlmock.NewRows([]string{"collection_id", "ts"}).
                AddRow(collID1, noTs))

    // actual
    res, err := collTestDb.GetCollectionIDTs(tenantID, collID1, noTs)
    assert.NoError(t, err)
    assert.Equal(t, collection, res)
}

func TestCollection_GetCidTs_TsNot0(t *testing.T) {
    resultTs := typeutil.Timestamp(2)
    var collection = &dbmodel.Collection{
        CollectionID: collID1,
        Ts:           resultTs,
    }

    // expectation
    mock.ExpectQuery("SELECT collection_id, ts FROM `collections` WHERE tenant_id = ? AND collection_id = ? AND ts <= ? ORDER BY ts desc LIMIT 1").
        WithArgs(tenantID, collID1, ts).
        WillReturnRows(
            sqlmock.NewRows([]string{"collection_id", "ts"}).
                AddRow(collID1, resultTs))

    // actual
    res, err := collTestDb.GetCollectionIDTs(tenantID, collID1, ts)
    assert.NoError(t, err)
    assert.Equal(t, collection, res)
}

func TestCollection_GetCidTs_TsNot0_Error(t *testing.T) {
    // expectation
    mock.ExpectQuery("SELECT collection_id, ts FROM `collections` WHERE tenant_id = ? AND collection_id = ? AND ts <= ? ORDER BY ts desc LIMIT 1").
        WithArgs(tenantID, collID1, ts).
        WillReturnError(errors.New("test error"))

    // actual
    res, err := collTestDb.GetCollectionIDTs(tenantID, collID1, ts)
    assert.Nil(t, res)
    assert.Error(t, err)
}

func TestCollection_GetCidTs_TsNot0_ErrRecordNotFound(t *testing.T) {
    // expectation
    mock.ExpectQuery("SELECT collection_id, ts FROM `collections` WHERE tenant_id = ? AND collection_id = ? AND ts <= ? ORDER BY ts desc LIMIT 1").
        WithArgs(tenantID, collID1, ts).
        WillReturnError(gorm.ErrRecordNotFound)

    // actual
    res, err := collTestDb.GetCollectionIDTs(tenantID, collID1, ts)
    assert.Nil(t, res)
    assert.Error(t, err)
}

func TestCollection_ListCidTs_TsNot0(t *testing.T) {
    var collection = []*dbmodel.Collection{
        {
            CollectionID: collID1,
            Ts:           typeutil.Timestamp(2),
        },
        {
            CollectionID: collID2,
            Ts:           typeutil.Timestamp(5),
        },
    }

    // expectation
    mock.ExpectQuery("SELECT collection_id, MAX(ts) ts FROM `collections` WHERE tenant_id = ? AND ts <= ? GROUP BY `collection_id`").
        WithArgs(tenantID, ts).
        WillReturnRows(
            sqlmock.NewRows([]string{"collection_id", "ts"}).
                AddRow(collID1, typeutil.Timestamp(2)).
                AddRow(collID2, typeutil.Timestamp(5)))

    // actual
    res, err := collTestDb.ListCollectionIDTs(tenantID, ts)
    assert.NoError(t, err)
    assert.Equal(t, collection, res)
}

func TestCollection_ListCidTs_TsNot0_Error(t *testing.T) {
    // expectation
    mock.ExpectQuery("SELECT collection_id, MAX(ts) ts FROM `collections` WHERE tenant_id = ? AND ts <= ? GROUP BY `collection_id`").
        WithArgs(tenantID, ts).
        WillReturnError(errors.New("test error"))

    // actual
    res, err := collTestDb.ListCollectionIDTs(tenantID, ts)
    assert.Nil(t, res)
    assert.Error(t, err)
}

func TestCollection_ListCidTs_Ts0(t *testing.T) {
    var collection = []*dbmodel.Collection{
        {
            CollectionID: collID1,
            Ts:           noTs,
        },
        {
            CollectionID: collID2,
            Ts:           noTs,
        },
    }

    // expectation
    mock.ExpectQuery("SELECT collection_id, MAX(ts) ts FROM `collections` WHERE tenant_id = ? AND ts <= ? GROUP BY `collection_id`").
        WithArgs(tenantID, noTs).
        WillReturnRows(
            sqlmock.NewRows([]string{"collection_id", "ts"}).
                AddRow(collID1, noTs).
                AddRow(collID2, noTs))

    // actual
    res, err := collTestDb.ListCollectionIDTs(tenantID, noTs)
    assert.NoError(t, err)
    assert.Equal(t, collection, res)
}

func TestCollection_Get(t *testing.T) {
    var collection = &dbmodel.Collection{
        TenantID:         "",
        CollectionID:     collID1,
        CollectionName:   "test_collection_name_1",
        Description:      "",
        AutoID:           false,
        ShardsNum:        common.DefaultShardsNum,
        StartPosition:    "",
        ConsistencyLevel: int32(commonpb.ConsistencyLevel_Eventually),
        Ts:               ts,
    }

    // expectation
    mock.ExpectQuery("SELECT * FROM `collections` WHERE tenant_id = ? AND collection_id = ? AND ts = ? AND is_deleted = false LIMIT 1").
        WithArgs(tenantID, collection.CollectionID, collection.Ts).
        WillReturnRows(
            sqlmock.NewRows([]string{"tenant_id", "collection_id", "collection_name", "description", "auto_id", "shards_num", "start_position", "consistency_level", "ts"}).
                AddRow(collection.TenantID, collection.CollectionID, collection.CollectionName, collection.Description, collection.AutoID, collection.ShardsNum, collection.StartPosition, collection.ConsistencyLevel, collection.Ts))

    // actual
    res, err := collTestDb.Get(tenantID, collID1, ts)
    assert.NoError(t, err)
    assert.Equal(t, collection, res)
}

func TestCollection_Get_Error(t *testing.T) {
    var collection = &dbmodel.Collection{
        TenantID:         "",
        CollectionID:     collID1,
        CollectionName:   "test_collection_name_1",
        Description:      "",
        AutoID:           false,
        ShardsNum:        common.DefaultShardsNum,
        StartPosition:    "",
        ConsistencyLevel: int32(commonpb.ConsistencyLevel_Eventually),
        Ts:               ts,
    }

    // expectation
    mock.ExpectQuery("SELECT * FROM `collections` WHERE tenant_id = ? AND collection_id = ? AND ts = ? AND is_deleted = false LIMIT 1").
        WithArgs(tenantID, collection.CollectionID, collection.Ts).
        WillReturnError(errors.New("test error"))

    // actual
    res, err := collTestDb.Get(tenantID, collID1, ts)
    assert.Nil(t, res)
    assert.Error(t, err)
}

func TestCollection_Get_ErrRecordNotFound(t *testing.T) {
    var collection = &dbmodel.Collection{
        TenantID:         "",
        CollectionID:     collID1,
        CollectionName:   "test_collection_name_1",
        Description:      "",
        AutoID:           false,
        ShardsNum:        common.DefaultShardsNum,
        StartPosition:    "",
        ConsistencyLevel: int32(commonpb.ConsistencyLevel_Eventually),
        Ts:               ts,
    }

    // expectation
    mock.ExpectQuery("SELECT * FROM `collections` WHERE tenant_id = ? AND collection_id = ? AND ts = ? AND is_deleted = false LIMIT 1").
        WithArgs(tenantID, collection.CollectionID, collection.Ts).
        WillReturnError(gorm.ErrRecordNotFound)

    // actual
    res, err := collTestDb.Get(tenantID, collID1, ts)
    assert.Nil(t, res)
    assert.Error(t, err)
}

func TestCollection_GetCollectionIDByName(t *testing.T) {
    collectionName := "test_collection_name_1"

    // expectation
    mock.ExpectQuery("SELECT `collection_id` FROM `collections` WHERE tenant_id = ? AND collection_name = ? AND ts <= ? ORDER BY ts desc LIMIT 1").
        WithArgs(tenantID, collectionName, ts).
        WillReturnRows(
            sqlmock.NewRows([]string{"collection_id"}).
                AddRow(collID1))

    // actual
    res, err := collTestDb.GetCollectionIDByName(tenantID, collectionName, ts)
    assert.NoError(t, err)
    assert.Equal(t, collID1, res)
}

func TestCollection_GetCollectionIDByName_Error(t *testing.T) {
    collectionName := "test_collection_name_1"

    // expectation
    mock.ExpectQuery("SELECT `collection_id` FROM `collections` WHERE tenant_id = ? AND collection_name = ? AND ts <= ? ORDER BY ts desc LIMIT 1").
        WithArgs(tenantID, collectionName, ts).
        WillReturnError(errors.New("test error"))

    // actual
    res, err := collTestDb.GetCollectionIDByName(tenantID, collectionName, ts)
    assert.Equal(t, typeutil.UniqueID(0), res)
    assert.Error(t, err)
}

func TestCollection_GetCollectionIDByName_ErrRecordNotFound(t *testing.T) {
    collectionName := "test_collection_name_1"

    // expectation
    mock.ExpectQuery("SELECT `collection_id` FROM `collections` WHERE tenant_id = ? AND collection_name = ? AND ts <= ? ORDER BY ts desc LIMIT 1").
        WithArgs(tenantID, collectionName, ts).
        WillReturnError(gorm.ErrRecordNotFound)

    // actual
    res, err := collTestDb.GetCollectionIDByName(tenantID, collectionName, ts)
    assert.Equal(t, typeutil.UniqueID(0), res)
    assert.Error(t, err)
}

func TestCollection_Insert(t *testing.T) {
    var collection = &dbmodel.Collection{
        TenantID:         "",
        CollectionID:     collID1,
        CollectionName:   "test_collection_name_1",
        Description:      "",
        AutoID:           false,
        ShardsNum:        common.DefaultShardsNum,
        StartPosition:    "",
        ConsistencyLevel: int32(commonpb.ConsistencyLevel_Eventually),
        Properties:       propertiesStr,
        Ts:               ts,
        IsDeleted:        false,
        CreatedAt:        time.Now(),
        UpdatedAt:        time.Now(),
    }

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("INSERT INTO `collections` (`tenant_id`,`collection_id`,`collection_name`,`description`,`auto_id`,`shards_num`,`start_position`,`consistency_level`,`status`,`properties`,`ts`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?) ON DUPLICATE KEY UPDATE `id`=`id`").
        WithArgs(collection.TenantID, collection.CollectionID, collection.CollectionName, collection.Description, collection.AutoID, collection.ShardsNum, collection.StartPosition, collection.ConsistencyLevel, collection.Status, collection.Properties, collection.Ts, collection.IsDeleted, collection.CreatedAt, collection.UpdatedAt).
        WillReturnResult(sqlmock.NewResult(1, 1))
    mock.ExpectCommit()

    // actual
    err := collTestDb.Insert(collection)
    assert.NoError(t, err)
}

func TestCollection_Insert_Error(t *testing.T) {
    var collection = &dbmodel.Collection{
        TenantID:         "",
        CollectionID:     collID1,
        CollectionName:   "test_collection_name_1",
        Description:      "",
        AutoID:           false,
        ShardsNum:        common.DefaultShardsNum,
        StartPosition:    "",
        ConsistencyLevel: int32(commonpb.ConsistencyLevel_Eventually),
        Ts:               ts,
        IsDeleted:        false,
        CreatedAt:        time.Now(),
        UpdatedAt:        time.Now(),
    }

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("INSERT INTO `collections` (`tenant_id`,`collection_id`,`collection_name`,`description`,`auto_id`,`shards_num`,`start_position`,`consistency_level`,`status`,`properties`,`ts`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?) ON DUPLICATE KEY UPDATE `id`=`id`").
        WithArgs(collection.TenantID, collection.CollectionID, collection.CollectionName, collection.Description, collection.AutoID, collection.ShardsNum, collection.StartPosition, collection.ConsistencyLevel, collection.Status, collection.Properties, collection.Ts, collection.IsDeleted, collection.CreatedAt, collection.UpdatedAt).
        WillReturnError(errors.New("test error"))
    mock.ExpectRollback()

    // actual
    err := collTestDb.Insert(collection)
    assert.Error(t, err)
}

type AnyTime struct{}

func (a AnyTime) Match(v driver.Value) bool {
    _, ok := v.(time.Time)
    return ok
}

func GetBase() dbmodel.Base {
    return dbmodel.Base{
        TenantID:  tenantID,
        IsDeleted: false,
        CreatedAt: time.Now(),
        UpdatedAt: time.Now(),
    }
}

func SuccessExec(f func()) {
    mock.ExpectBegin()
    f()
    mock.ExpectCommit()
}

func ErrorExec(f func()) {
    mock.ExpectBegin()
    f()
    mock.ExpectRollback()
}

func Test_collectionDb_Update(t *testing.T) {
    t.Run("normal case", func(t *testing.T) {
        var collection = &dbmodel.Collection{
            TenantID:         "",
            CollectionID:     collID1,
            CollectionName:   "test_collection_name_1",
            Description:      "",
            AutoID:           false,
            ShardsNum:        common.DefaultShardsNum,
            StartPosition:    "",
            ConsistencyLevel: int32(commonpb.ConsistencyLevel_Eventually),
            Ts:               ts,
            IsDeleted:        false,
            CreatedAt:        time.Now(),
            UpdatedAt:        time.Now(),
        }

        // expectation
        mock.ExpectBegin()
        mock.ExpectExec("UPDATE `collections` SET `auto_id`=?,`collection_id`=?,`collection_name`=?,`consistency_level`=?,`created_at`=?,`description`=?,`is_deleted`=?,`properties`=?,`shards_num`=?,`start_position`=?,`status`=?,`tenant_id`=?,`ts`=?,`updated_at`=? WHERE id = ?").
            WithArgs(collection.AutoID, collection.CollectionID, collection.CollectionName, collection.ConsistencyLevel, collection.CreatedAt, collection.Description, collection.IsDeleted, collection.Properties, collection.ShardsNum, collection.StartPosition, collection.Status, collection.TenantID, collection.Ts, collection.UpdatedAt, collection.ID).
            WillReturnResult(sqlmock.NewResult(1, 1))
        mock.ExpectCommit()

        // actual
        err := collTestDb.Update(collection)
        assert.NoError(t, err)
    })

    t.Run("error", func(t *testing.T) {
        var collection = &dbmodel.Collection{
            TenantID:         "",
            CollectionID:     collID1,
            CollectionName:   "test_collection_name_1",
            Description:      "",
            AutoID:           false,
            ShardsNum:        common.DefaultShardsNum,
            StartPosition:    "",
            ConsistencyLevel: int32(commonpb.ConsistencyLevel_Eventually),
            Ts:               ts,
            IsDeleted:        false,
            CreatedAt:        time.Now(),
            UpdatedAt:        time.Now(),
        }

        // expectation
        mock.ExpectBegin()
        mock.ExpectExec("UPDATE `collections` SET `auto_id`=?,`collection_id`=?,`collection_name`=?,`consistency_level`=?,`created_at`=?,`description`=?,`is_deleted`=?,`properties`=?,`shards_num`=?,`start_position`=?,`status`=?,`tenant_id`=?,`ts`=?,`updated_at`=? WHERE id = ?").
            WithArgs(collection.AutoID, collection.CollectionID, collection.CollectionName, collection.ConsistencyLevel, collection.CreatedAt, collection.Description, collection.IsDeleted, collection.Properties, collection.ShardsNum, collection.StartPosition, collection.Status, collection.TenantID, collection.Ts, collection.UpdatedAt, collection.ID).
            WillReturnError(errors.New("error mock Update"))
        mock.ExpectRollback()

        // actual
        err := collTestDb.Update(collection)
        assert.Error(t, err)
    })
}
@@ -1,62 +0,0 @@ (file deleted)
package dao

import (
    "context"

    "github.com/milvus-io/milvus/internal/metastore/db/dbcore"
    "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
)

type metaDomain struct{}

func NewMetaDomain() *metaDomain {
    return &metaDomain{}
}

func (*metaDomain) CollectionDb(ctx context.Context) dbmodel.ICollectionDb {
    return &collectionDb{dbcore.GetDB(ctx)}
}

func (*metaDomain) FieldDb(ctx context.Context) dbmodel.IFieldDb {
    return &fieldDb{dbcore.GetDB(ctx)}
}

func (*metaDomain) CollChannelDb(ctx context.Context) dbmodel.ICollChannelDb {
    return &collChannelDb{dbcore.GetDB(ctx)}
}

func (*metaDomain) CollAliasDb(ctx context.Context) dbmodel.ICollAliasDb {
    return &collAliasDb{dbcore.GetDB(ctx)}
}

func (*metaDomain) PartitionDb(ctx context.Context) dbmodel.IPartitionDb {
    return &partitionDb{dbcore.GetDB(ctx)}
}

func (*metaDomain) IndexDb(ctx context.Context) dbmodel.IIndexDb {
    return &indexDb{dbcore.GetDB(ctx)}
}

func (*metaDomain) SegmentIndexDb(ctx context.Context) dbmodel.ISegmentIndexDb {
    return &segmentIndexDb{dbcore.GetDB(ctx)}
}

func (*metaDomain) UserDb(ctx context.Context) dbmodel.IUserDb {
    return &userDb{dbcore.GetDB(ctx)}
}

func (d *metaDomain) RoleDb(ctx context.Context) dbmodel.IRoleDb {
    return &roleDb{dbcore.GetDB(ctx)}
}

func (d *metaDomain) UserRoleDb(ctx context.Context) dbmodel.IUserRoleDb {
    return &userRoleDb{dbcore.GetDB(ctx)}
}

func (d *metaDomain) GrantDb(ctx context.Context) dbmodel.IGrantDb {
    return &grantDb{dbcore.GetDB(ctx)}
}

func (d *metaDomain) GrantIDDb(ctx context.Context) dbmodel.IGrantIDDb {
    return &grantIDDb{dbcore.GetDB(ctx)}
}
@ -1,36 +0,0 @@
package dao

import (
"go.uber.org/zap"
"gorm.io/gorm"

"github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
"github.com/milvus-io/milvus/pkg/log"
"github.com/milvus-io/milvus/pkg/util/typeutil"
)

type fieldDb struct {
db *gorm.DB
}

func (s *fieldDb) GetByCollectionID(tenantID string, collectionID typeutil.UniqueID, ts typeutil.Timestamp) ([]*dbmodel.Field, error) {
var r []*dbmodel.Field

err := s.db.Model(&dbmodel.Field{}).Where("tenant_id = ? AND collection_id = ? AND ts = ? AND is_deleted = false", tenantID, collectionID, ts).Find(&r).Error
if err != nil {
log.Error("get fields by collection_id and ts failed", zap.String("tenant", tenantID), zap.Int64("collectionID", collectionID), zap.Uint64("ts", ts), zap.Error(err))
return nil, err
}

return r, nil
}

func (s *fieldDb) Insert(in []*dbmodel.Field) error {
err := s.db.CreateInBatches(in, 100).Error
if err != nil {
log.Error("insert field failed", zap.Error(err))
return err
}

return nil
}
@ -1,119 +0,0 @@
package dao

import (
"testing"
"time"

"github.com/cockroachdb/errors"

"github.com/DATA-DOG/go-sqlmock"
"github.com/milvus-io/milvus-proto/go-api/v2/schemapb"
"github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
"github.com/stretchr/testify/assert"
)

func TestField_GetByCollID(t *testing.T) {
var fields = []*dbmodel.Field{
{
TenantID: tenantID,
FieldID: fieldID1,
FieldName: "test_field_1",
IsPrimaryKey: false,
Description: "",
DataType: schemapb.DataType_FloatVector,
TypeParams: "",
IndexParams: "",
AutoID: false,
CollectionID: collID1,
Ts: ts,
},
}

// expectation
mock.ExpectQuery("SELECT * FROM `field_schemas` WHERE tenant_id = ? AND collection_id = ? AND ts = ? AND is_deleted = false").
WithArgs(tenantID, collID1, ts).
WillReturnRows(
sqlmock.NewRows([]string{"tenant_id", "field_id", "field_name", "is_primary_key", "description", "data_type", "type_params", "index_params", "auto_id", "collection_id", "ts"}).
AddRow(fields[0].TenantID, fields[0].FieldID, fields[0].FieldName, fields[0].IsPrimaryKey, fields[0].Description, fields[0].DataType, fields[0].TypeParams, fields[0].IndexParams, fields[0].AutoID, fields[0].CollectionID, fields[0].Ts))

// actual
res, err := fieldTestDb.GetByCollectionID(tenantID, collID1, ts)
assert.NoError(t, err)
assert.Equal(t, fields, res)
}

func TestField_GetByCollID_Error(t *testing.T) {
// expectation
mock.ExpectQuery("SELECT * FROM `field_schemas` WHERE tenant_id = ? AND collection_id = ? AND ts = ? AND is_deleted = false").
WithArgs(tenantID, collID1, ts).
WillReturnError(errors.New("test error"))

// actual
res, err := fieldTestDb.GetByCollectionID(tenantID, collID1, ts)
assert.Nil(t, res)
assert.Error(t, err)
}

func TestField_Insert(t *testing.T) {
var fields = []*dbmodel.Field{
{
TenantID: tenantID,
FieldID: fieldID1,
FieldName: "test_field_1",
IsPrimaryKey: false,
Description: "",
DataType: schemapb.DataType_FloatVector,
TypeParams: "",
IndexParams: "",
AutoID: false,
CollectionID: collID1,
Ts: ts,
IsDeleted: false,
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
}

// expectation
mock.ExpectBegin()
mock.ExpectExec("INSERT INTO `field_schemas` (`tenant_id`,`field_id`,`field_name`,`is_primary_key`,`description`,`data_type`,`type_params`,`index_params`,`auto_id`,`collection_id`,`ts`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)").
WithArgs(fields[0].TenantID, fields[0].FieldID, fields[0].FieldName, fields[0].IsPrimaryKey, fields[0].Description, fields[0].DataType, fields[0].TypeParams, fields[0].IndexParams, fields[0].AutoID, fields[0].CollectionID, fields[0].Ts, fields[0].IsDeleted, fields[0].CreatedAt, fields[0].UpdatedAt).
WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectCommit()

// actual
err := fieldTestDb.Insert(fields)
assert.NoError(t, err)
}

func TestField_Insert_Error(t *testing.T) {
var fields = []*dbmodel.Field{
{
TenantID: tenantID,
FieldID: fieldID1,
FieldName: "test_field_1",
IsPrimaryKey: false,
Description: "",
DataType: schemapb.DataType_FloatVector,
TypeParams: "",
IndexParams: "",
AutoID: false,
CollectionID: collID1,
Ts: ts,
IsDeleted: false,
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
}

// expectation
mock.ExpectBegin()
mock.ExpectExec("INSERT INTO `field_schemas` (`tenant_id`,`field_id`,`field_name`,`is_primary_key`,`description`,`data_type`,`type_params`,`index_params`,`auto_id`,`collection_id`,`ts`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)").
WithArgs(fields[0].TenantID, fields[0].FieldID, fields[0].FieldName, fields[0].IsPrimaryKey, fields[0].Description, fields[0].DataType, fields[0].TypeParams, fields[0].IndexParams, fields[0].AutoID, fields[0].CollectionID, fields[0].Ts, fields[0].IsDeleted, fields[0].CreatedAt, fields[0].UpdatedAt).
WillReturnError(errors.New("test error"))
mock.ExpectRollback()

// actual
err := fieldTestDb.Insert(fields)
assert.Error(t, err)
}
@ -1,55 +0,0 @@
package dao

import (
"go.uber.org/zap"
"gorm.io/gorm"

"github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
"github.com/milvus-io/milvus/pkg/log"
)

type grantIDDb struct {
db *gorm.DB
}

func (g *grantIDDb) GetGrantIDs(tenantID string, grantID int64, privilege string, preloadGrant bool, preloadGrantor bool) ([]*dbmodel.GrantID, error) {
var (
grantIDs []*dbmodel.GrantID
db *gorm.DB
err error
)
db = g.db.Model(&dbmodel.GrantID{}).
Where(&dbmodel.GrantID{GrantID: grantID, Privilege: privilege}).
Where(dbmodel.GetCommonCondition(tenantID, false))
if preloadGrant {
db = db.Preload("Grant")
}
if preloadGrantor {
db = db.Preload("Grantor")
}
err = db.Find(&grantIDs).Error
if err != nil {
log.Error("fail to get grant ids", zap.String("tenant_id", tenantID), zap.Int64("grantID", grantID), zap.String("privilege", privilege), zap.Error(err))
return nil, err
}
return grantIDs, err
}

func (g *grantIDDb) Insert(in *dbmodel.GrantID) error {
err := g.db.Create(in).Error
if err != nil {
log.Error("fail to insert the grant-id", zap.Any("in", in), zap.Error(err))
}
return err
}

func (g *grantIDDb) Delete(tenantID string, grantID int64, privilege string) error {
err := g.db.Model(dbmodel.GrantID{}).
Where(&dbmodel.GrantID{GrantID: grantID, Privilege: privilege}).
Where(dbmodel.GetCommonCondition(tenantID, false)).
Update("is_deleted", true).Error
if err != nil {
log.Error("fail to delete the user-role", zap.String("tenant_id", tenantID), zap.Int64("grantID", grantID), zap.String("privilege", privilege), zap.Error(err))
}
return err
}
@ -1,236 +0,0 @@
package dao

import (
"testing"

"github.com/cockroachdb/errors"

"github.com/DATA-DOG/go-sqlmock"
"github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
"github.com/stretchr/testify/assert"
)

func TestGrantID_GetGrantIDs(t *testing.T) {
var (
grantID1 int64 = 10
grantID2 int64 = 20
grantorID1 int64 = 1
grantorID2 int64 = 2
privilege1 = "PrivilegeLoad"
privilege2 = "PrivilegeInsert"
grantIDs []*dbmodel.GrantID
err error
)

mock.ExpectQuery("SELECT * FROM `grant_id` WHERE `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(false, tenantID).
WillReturnRows(
sqlmock.NewRows([]string{"tenant_id", "grant_id", "grantor_id", "privilege"}).
AddRow(tenantID, grantID1, grantorID1, privilege1).
AddRow(tenantID, grantID2, grantorID2, privilege2))

grantIDs, err = grantIDTestDb.GetGrantIDs(tenantID, 0, "", false, false)
mock.MatchExpectationsInOrder(false)
assert.NoError(t, err)
assert.Equal(t, 2, len(grantIDs))
assert.Equal(t, grantID1, grantIDs[0].GrantID)
assert.Equal(t, grantorID2, grantIDs[1].GrantorID)
assert.Equal(t, privilege2, grantIDs[1].Privilege)

mock.ExpectQuery("SELECT * FROM `grant_id` WHERE `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(false, tenantID).
WillReturnError(errors.New("test error"))
_, err = grantIDTestDb.GetGrantIDs(tenantID, 0, "", false, false)
mock.MatchExpectationsInOrder(false)
assert.Error(t, err)
}

func TestGrantID_GetGrantIDs_Preload(t *testing.T) {
var (
grantID1 int64 = 10
grantID2 int64 = 20
grantorID1 int64 = 1
grantorID2 int64 = 2
privilege1 = "PrivilegeLoad"
privilege2 = "PrivilegeInsert"
grantIDs []*dbmodel.GrantID
err error
)

mock.ExpectQuery("SELECT * FROM `grant_id` WHERE `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(false, tenantID).
WillReturnRows(
sqlmock.NewRows([]string{"tenant_id", "grant_id", "grantor_id", "privilege"}).
AddRow(tenantID, grantID1, grantorID1, privilege1).
AddRow(tenantID, grantID2, grantorID2, privilege2))

mock.ExpectQuery("SELECT * FROM `grant` WHERE `grant`.`id` IN (?,?)").
WithArgs(grantID1, grantID2).
WillReturnRows(
sqlmock.NewRows([]string{"id", "tenant_id", "object"}).
AddRow(grantID1, tenantID, "obj1").
AddRow(grantID2, tenantID, "obj2"))

mock.ExpectQuery("SELECT * FROM `credential_users` WHERE `credential_users`.`id` IN (?,?)").
WithArgs(grantorID1, grantorID2).
WillReturnRows(
sqlmock.NewRows([]string{"id", "tenant_id", "username"}).
AddRow(grantorID1, tenantID, "fo1").
AddRow(grantorID2, tenantID, "fo2"))

grantIDs, err = grantIDTestDb.GetGrantIDs(tenantID, 0, "", true, true)
mock.MatchExpectationsInOrder(false)
assert.NoError(t, err)
assert.Equal(t, 2, len(grantIDs))
assert.Equal(t, grantID1, grantIDs[0].GrantID)
assert.Equal(t, "obj1", grantIDs[0].Grant.Object)
assert.Equal(t, grantorID2, grantIDs[1].GrantorID)
assert.Equal(t, privilege2, grantIDs[1].Privilege)
assert.Equal(t, "fo2", grantIDs[1].Grantor.Username)

mock.ExpectQuery("SELECT * FROM `grant_id` WHERE `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(false, tenantID).
WillReturnError(errors.New("test error"))
_, err = grantIDTestDb.GetGrantIDs(tenantID, 0, "", true, true)
mock.MatchExpectationsInOrder(false)
assert.Error(t, err)
}

func TestGrantID_GetGrantIDs_WithGrant(t *testing.T) {
var (
grantID1 int64 = 10
grantorID1 int64 = 1
grantorID2 int64 = 2
privilege1 = "PrivilegeLoad"
privilege2 = "PrivilegeInsert"
grantIDs []*dbmodel.GrantID
err error
)

mock.ExpectQuery("SELECT * FROM `grant_id` WHERE `grant_id`.`grant_id` = ? AND `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(grantID1, false, tenantID).
WillReturnRows(
sqlmock.NewRows([]string{"tenant_id", "grant_id", "grantor_id", "privilege"}).
AddRow(tenantID, grantID1, grantorID1, privilege1).
AddRow(tenantID, grantID1, grantorID2, privilege2))

grantIDs, err = grantIDTestDb.GetGrantIDs(tenantID, grantID1, "", false, false)
mock.MatchExpectationsInOrder(false)
assert.NoError(t, err)
assert.Equal(t, 2, len(grantIDs))
assert.Equal(t, grantID1, grantIDs[0].GrantID)
assert.Equal(t, grantorID2, grantIDs[1].GrantorID)
assert.Equal(t, privilege2, grantIDs[1].Privilege)
}

func TestGrantID_GetGrantIDs_WithGrantAndPrivilege(t *testing.T) {
var (
grantID1 int64 = 10
grantorID1 int64 = 1
privilege1 = "PrivilegeLoad"
grantIDs []*dbmodel.GrantID
err error
)

mock.ExpectQuery("SELECT * FROM `grant_id` WHERE `grant_id`.`grant_id` = ? AND `grant_id`.`privilege` = ? AND `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(grantID1, privilege1, false, tenantID).
WillReturnRows(
sqlmock.NewRows([]string{"tenant_id", "grant_id", "grantor_id", "privilege"}).
AddRow(tenantID, grantID1, grantorID1, privilege1))

grantIDs, err = grantIDTestDb.GetGrantIDs(tenantID, grantID1, privilege1, false, false)
mock.MatchExpectationsInOrder(false)
assert.NoError(t, err)
assert.Equal(t, 1, len(grantIDs))
assert.Equal(t, grantID1, grantIDs[0].GrantID)
assert.Equal(t, privilege1, grantIDs[0].Privilege)
}

func TestGrantID_Insert(t *testing.T) {
var (
grantID *dbmodel.GrantID
err error
)
grantID = &dbmodel.GrantID{
Base: GetBase(),
GrantID: 1,
GrantorID: 10,
Privilege: "PrivilegeLoad",
}

mock.ExpectBegin()
mock.ExpectExec("INSERT INTO `grant_id` (`tenant_id`,`is_deleted`,`created_at`,`updated_at`,`grant_id`,`privilege`,`grantor_id`) VALUES (?,?,?,?,?,?,?)").
WithArgs(grantID.TenantID, grantID.IsDeleted, grantID.CreatedAt, grantID.UpdatedAt, grantID.GrantID, grantID.Privilege, grantID.GrantorID).
WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectCommit()
err = grantIDTestDb.Insert(grantID)
mock.MatchExpectationsInOrder(false)
assert.NoError(t, err)

}

func TestGrantID_Insert_Error(t *testing.T) {
var (
grantID *dbmodel.GrantID
err error
)
grantID = &dbmodel.GrantID{
Base: GetBase(),
GrantID: 1,
GrantorID: 10,
Privilege: "PrivilegeLoad",
}

mock.ExpectBegin()
mock.ExpectExec("INSERT INTO `grant_id` (`tenant_id`,`is_deleted`,`created_at`,`updated_at`,`grant_id`,`privilege`,`grantor_id`) VALUES (?,?,?,?,?,?,?)").
WithArgs(grantID.TenantID, grantID.IsDeleted, grantID.CreatedAt, grantID.UpdatedAt, grantID.GrantID, grantID.Privilege, grantID.GrantorID).
WillReturnError(errors.New("test error"))
mock.ExpectRollback()
err = grantIDTestDb.Insert(grantID)
mock.MatchExpectationsInOrder(false)
assert.Error(t, err)
}

func TestGrantID_Delete(t *testing.T) {
var (
grantID *dbmodel.GrantID
err error
)
grantID = &dbmodel.GrantID{
Base: GetBase(),
GrantID: 1,
GrantorID: 10,
Privilege: "PrivilegeLoad",
}

mock.ExpectBegin()
mock.ExpectExec("UPDATE `grant_id` SET `is_deleted`=?,`updated_at`=? WHERE `grant_id`.`grant_id` = ? AND `grant_id`.`privilege` = ? AND `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(true, AnyTime{}, grantID.GrantID, grantID.Privilege, grantID.IsDeleted, grantID.TenantID).
WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectCommit()
err = grantIDTestDb.Delete(grantID.TenantID, grantID.GrantID, grantID.Privilege)
mock.MatchExpectationsInOrder(false)
assert.NoError(t, err)
}

func TestGrantID_Delete_Error(t *testing.T) {
var (
grantID *dbmodel.GrantID
err error
)
grantID = &dbmodel.GrantID{
Base: GetBase(),
GrantID: 1,
GrantorID: 10,
Privilege: "PrivilegeLoad",
}

mock.ExpectBegin()
mock.ExpectExec("UPDATE `grant_id` SET `is_deleted`=?,`updated_at`=? WHERE `grant_id`.`grant_id` = ? AND `grant_id`.`privilege` = ? AND `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(true, AnyTime{}, grantID.GrantID, grantID.Privilege, grantID.IsDeleted, grantID.TenantID).
WillReturnError(errors.New("test error"))
mock.ExpectCommit()
err = grantIDTestDb.Delete(grantID.TenantID, grantID.GrantID, grantID.Privilege)
mock.MatchExpectationsInOrder(false)
assert.Error(t, err)
}
@ -1,49 +0,0 @@
package dao

import (
"go.uber.org/zap"
"gorm.io/gorm"

"github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
"github.com/milvus-io/milvus/pkg/log"
)

type grantDb struct {
db *gorm.DB
}

func (g *grantDb) GetGrants(tenantID string, roleID int64, object string, objectName string) ([]*dbmodel.Grant, error) {
var (
grants []*dbmodel.Grant
err error
)
err = g.db.Model(&dbmodel.Grant{}).
Where(&dbmodel.Grant{RoleID: roleID, Object: object, ObjectName: objectName}).
Where(dbmodel.GetCommonCondition(tenantID, false)).
Preload("Role").
Find(&grants).Error
if err != nil {
log.Error("fail to get grants", zap.String("tenant_id", tenantID), zap.Int64("roleID", roleID), zap.String("object", object), zap.String("object_name", objectName), zap.Error(err))
return nil, err
}
return grants, nil
}

func (g *grantDb) Insert(in *dbmodel.Grant) error {
err := g.db.Create(in).Error
if err != nil {
log.Error("fail to insert the grant", zap.Any("in", in), zap.Error(err))
}
return err
}

func (g *grantDb) Delete(tenantID string, roleID int64, object string, objectName string) error {
err := g.db.Model(dbmodel.Grant{}).
Where(&dbmodel.Grant{RoleID: roleID, Object: object, ObjectName: objectName}).
Where(dbmodel.GetCommonCondition(tenantID, false)).
Update("is_deleted", true).Error
if err != nil {
log.Error("fail to delete the grant", zap.String("tenant_id", tenantID), zap.Int64("roleID", roleID), zap.String("object", object), zap.String("object_name", objectName), zap.Error(err))
}
return err
}
@ -1,203 +0,0 @@
package dao

import (
"testing"

"github.com/cockroachdb/errors"

"github.com/DATA-DOG/go-sqlmock"
"github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
"github.com/stretchr/testify/assert"
)

func TestGrant_GetGrants(t *testing.T) {
var (
roleID1 = 10
roleID2 = 20
object = "Collection"
objectName = "col1"
grants []*dbmodel.Grant
err error
)

mock.ExpectQuery("SELECT * FROM `grant` WHERE `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(false, tenantID).
WillReturnRows(
sqlmock.NewRows([]string{"tenant_id", "role_id", "object", "object_name"}).
AddRow(tenantID, roleID1, object, objectName).
AddRow(tenantID, roleID2, object, objectName))

mock.ExpectQuery("SELECT * FROM `role` WHERE `role`.`id` IN (?,?)").
WithArgs(roleID1, roleID2).
WillReturnRows(
sqlmock.NewRows([]string{"id", "tenant_id", "name"}).
AddRow(roleID1, tenantID, "foo1").
AddRow(roleID2, tenantID, "foo2"))

grants, err = grantTestDb.GetGrants(tenantID, 0, "", "")
mock.MatchExpectationsInOrder(false)
assert.NoError(t, err)
assert.Equal(t, 2, len(grants))
assert.Equal(t, "foo2", grants[1].Role.Name)
assert.Equal(t, object, grants[0].Object)
assert.Equal(t, objectName, grants[0].ObjectName)

mock.ExpectQuery("SELECT * FROM `grant` WHERE `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(false, tenantID).
WillReturnError(errors.New("test error"))
_, err = grantTestDb.GetGrants(tenantID, 0, "", "")
mock.MatchExpectationsInOrder(false)
assert.Error(t, err)
}

func TestGrant_GetGrantsWithRoleID(t *testing.T) {
var (
roleID1 = 10
object1 = "Collection"
objectName1 = "col1"
object2 = "Global"
objectName2 = "*"
grants []*dbmodel.Grant
err error
)

mock.ExpectQuery("SELECT * FROM `grant` WHERE `grant`.`role_id` = ? AND `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(roleID1, false, tenantID).
WillReturnRows(
sqlmock.NewRows([]string{"tenant_id", "role_id", "object", "object_name"}).
AddRow(tenantID, roleID1, object1, objectName1).
AddRow(tenantID, roleID1, object2, objectName2))

mock.ExpectQuery("SELECT * FROM `role` WHERE `role`.`id` = ?").
WithArgs(roleID1).
WillReturnRows(
sqlmock.NewRows([]string{"id", "tenant_id", "name"}).
AddRow(roleID1, tenantID, "foo1"))

grants, err = grantTestDb.GetGrants(tenantID, int64(roleID1), "", "")
mock.MatchExpectationsInOrder(false)
assert.NoError(t, err)
assert.Equal(t, 2, len(grants))
assert.Equal(t, "foo1", grants[0].Role.Name)
assert.Equal(t, object1, grants[0].Object)
assert.Equal(t, objectName2, grants[1].ObjectName)
}

func TestGrant_GetGrantsWithObject(t *testing.T) {
var (
roleID = 10
object = "Collection"
objectName = "col1"
grants []*dbmodel.Grant
err error
)

mock.ExpectQuery("SELECT * FROM `grant` WHERE `grant`.`role_id` = ? AND `grant`.`object` = ? AND `grant`.`object_name` = ? AND `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(roleID, object, objectName, false, tenantID).
WillReturnRows(
sqlmock.NewRows([]string{"tenant_id", "role_id", "object", "object_name"}).
AddRow(tenantID, roleID, object, objectName))

mock.ExpectQuery("SELECT * FROM `role` WHERE `role`.`id` = ?").
WithArgs(roleID).
WillReturnRows(
sqlmock.NewRows([]string{"id", "tenant_id", "name"}).
AddRow(roleID, tenantID, "foo1"))

grants, err = grantTestDb.GetGrants(tenantID, int64(roleID), object, objectName)
mock.MatchExpectationsInOrder(false)
assert.NoError(t, err)
assert.Equal(t, 1, len(grants))
assert.Equal(t, "foo1", grants[0].Role.Name)
assert.Equal(t, object, grants[0].Object)
assert.Equal(t, objectName, grants[0].ObjectName)
}

func TestGrant_Insert(t *testing.T) {
var (
grant *dbmodel.Grant
err error
)
grant = &dbmodel.Grant{
Base: GetBase(),
RoleID: 1,
Object: "Global",
ObjectName: "Col",
}

mock.ExpectBegin()
mock.ExpectExec("INSERT INTO `grant` (`tenant_id`,`is_deleted`,`created_at`,`updated_at`,`role_id`,`object`,`object_name`) VALUES (?,?,?,?,?,?,?)").
WithArgs(grant.TenantID, grant.IsDeleted, grant.CreatedAt, grant.UpdatedAt, grant.RoleID, grant.Object, grant.ObjectName).
WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectCommit()
err = grantTestDb.Insert(grant)
mock.MatchExpectationsInOrder(false)
assert.NoError(t, err)

}

func TestGrant_Insert_Error(t *testing.T) {
var (
grant *dbmodel.Grant
err error
)
grant = &dbmodel.Grant{
Base: GetBase(),
RoleID: 1,
Object: "Global",
ObjectName: "Col",
}

mock.ExpectBegin()
mock.ExpectExec("INSERT INTO `grant` (`tenant_id`,`is_deleted`,`created_at`,`updated_at`,`role_id`,`object`,`object_name`) VALUES (?,?,?,?,?,?,?)").
WithArgs(grant.TenantID, grant.IsDeleted, grant.CreatedAt, grant.UpdatedAt, grant.RoleID, grant.Object, grant.ObjectName).
WillReturnError(errors.New("test error"))
mock.ExpectRollback()
err = grantTestDb.Insert(grant)
mock.MatchExpectationsInOrder(false)
assert.Error(t, err)
}

func TestGrant_Delete(t *testing.T) {
var (
grant *dbmodel.Grant
err error
)
grant = &dbmodel.Grant{
Base: GetBase(),
RoleID: 1,
Object: "Global",
ObjectName: "Col",
}

mock.ExpectBegin()
mock.ExpectExec("UPDATE `grant` SET `is_deleted`=?,`updated_at`=? WHERE `grant`.`role_id` = ? AND `grant`.`object` = ? AND `grant`.`object_name` = ? AND `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(true, AnyTime{}, grant.RoleID, grant.Object, grant.ObjectName, false, grant.TenantID).
WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectCommit()
err = grantTestDb.Delete(grant.TenantID, grant.RoleID, grant.Object, grant.ObjectName)
mock.MatchExpectationsInOrder(false)
assert.NoError(t, err)
}

func TestGrant_Delete_Error(t *testing.T) {
var (
grant *dbmodel.Grant
err error
)
grant = &dbmodel.Grant{
Base: GetBase(),
RoleID: 1,
Object: "Global",
ObjectName: "Col",
}

mock.ExpectBegin()
mock.ExpectExec("UPDATE `grant` SET `is_deleted`=?,`updated_at`=? WHERE `grant`.`role_id` = ? AND `grant`.`object` = ? AND `grant`.`object_name` = ? AND `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(true, AnyTime{}, grant.RoleID, grant.Object, grant.ObjectName, false, grant.TenantID).
WillReturnError(errors.New("test error"))
mock.ExpectCommit()
err = grantTestDb.Delete(grant.TenantID, grant.RoleID, grant.Object, grant.ObjectName)
mock.MatchExpectationsInOrder(false)
assert.Error(t, err)
}
@ -1,93 +0,0 @@
package dao

import (
"go.uber.org/zap"
"gorm.io/gorm"

"github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
"github.com/milvus-io/milvus/pkg/log"
"github.com/milvus-io/milvus/pkg/util/typeutil"
)

type indexDb struct {
db *gorm.DB
}

func (s *indexDb) Get(tenantID string, collectionID typeutil.UniqueID) ([]*dbmodel.Index, error) {
var r []*dbmodel.Index

err := s.db.Model(&dbmodel.Index{}).Where("tenant_id = ? AND collection_id = ?", tenantID, collectionID).Find(&r).Error
if err != nil {
log.Error("get indexes by collection_id failed", zap.String("tenant", tenantID), zap.Int64("collectionID", collectionID), zap.Error(err))
return nil, err
}

return r, nil
}

func (s *indexDb) List(tenantID string) ([]*dbmodel.IndexResult, error) {
tx := s.db.Table("indexes").
Select("indexes.field_id AS field_id, indexes.collection_id AS collection_id, indexes.index_id AS index_id, "+
"indexes.index_name AS index_name, indexes.index_params AS index_params, indexes.type_params AS type_params, "+
"indexes.is_deleted AS is_deleted, indexes.create_time AS create_time").
Where("indexes.tenant_id = ?", tenantID)

var rs []*dbmodel.IndexResult
err := tx.Scan(&rs).Error
if err != nil {
log.Error("list indexes by join failed", zap.String("tenant", tenantID), zap.Error(err))
return nil, err
}

return rs, nil
}

func (s *indexDb) Insert(in []*dbmodel.Index) error {
err := s.db.CreateInBatches(in, 100).Error
if err != nil {
log.Error("insert index failed", zap.Error(err))
return err
}

return nil
}

func (s *indexDb) Update(in *dbmodel.Index) error {
err := s.db.Model(&dbmodel.Index{}).Where("tenant_id = ? AND collection_id = ? AND index_id = ?", in.TenantID, in.CollectionID, in.IndexID).Updates(dbmodel.Index{
CreateTime: in.CreateTime, // if in.CreateTime is not set, column CreateTime will not be updated
IsDeleted: in.IsDeleted,
}).Error

if err != nil {
log.Error("update indexes failed", zap.String("tenant", in.TenantID), zap.Int64("collectionID", in.CollectionID), zap.Int64("indexID", in.IndexID), zap.Error(err))
return err
}

return nil
}

func (s *indexDb) MarkDeletedByCollectionID(tenantID string, collID typeutil.UniqueID) error {
err := s.db.Model(&dbmodel.Index{}).Where("tenant_id = ? AND collection_id = ?", tenantID, collID).Updates(dbmodel.Index{
IsDeleted: true,
}).Error

if err != nil {
log.Error("update indexes is_deleted=true failed", zap.String("tenant", tenantID), zap.Int64("collectionID", collID), zap.Error(err))
return err
}

return nil
}

func (s *indexDb) MarkDeletedByIndexID(tenantID string, indexID typeutil.UniqueID) error {
err := s.db.Model(&dbmodel.Index{}).Where("tenant_id = ? AND index_id = ?", tenantID, indexID).Updates(dbmodel.Index{
IsDeleted: true,
}).Error

if err != nil {
log.Error("update indexes is_deleted=true failed", zap.String("tenant", tenantID), zap.Int64("indexID", indexID), zap.Error(err))
return err
}

return nil
}
@ -1,248 +0,0 @@
package dao

import (
"testing"
"time"

"github.com/cockroachdb/errors"

"github.com/DATA-DOG/go-sqlmock"
"github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
"github.com/stretchr/testify/assert"
)

func TestIndex_Get(t *testing.T) {
var indexes = []*dbmodel.Index{
{
TenantID: "",
FieldID: fieldID1,
CollectionID: collID1,
IndexID: indexID1,
IndexName: "test_index_1",
IndexParams: "",
TypeParams: "",
UserIndexParams: "",
IsAutoIndex: false,
},
}

// expectation
mock.ExpectQuery("SELECT * FROM `indexes` WHERE tenant_id = ? AND collection_id = ?").
WithArgs(tenantID, collID1).
WillReturnRows(
sqlmock.NewRows([]string{"tenant_id", "field_id", "collection_id", "index_id", "index_name", "index_params", "type_params"}).
AddRow(indexes[0].TenantID, indexes[0].FieldID, indexes[0].CollectionID, indexes[0].IndexID, indexes[0].IndexName, indexes[0].IndexParams, indexes[0].TypeParams))

// actual
res, err := indexTestDb.Get(tenantID, collID1)
assert.NoError(t, err)
assert.Equal(t, indexes, res)
}

func TestIndex_Get_Error(t *testing.T) {
// expectation
mock.ExpectQuery("SELECT * FROM `indexes` WHERE tenant_id = ? AND collection_id = ?").
WithArgs(tenantID, collID1).
WillReturnError(errors.New("test error"))

// actual
res, err := indexTestDb.Get(tenantID, collID1)
assert.Nil(t, res)
assert.Error(t, err)
}

func TestIndex_List(t *testing.T) {
var indexResults = []*dbmodel.IndexResult{
{
FieldID: fieldID1,
CollectionID: collID1,
IndexID: indexID1,
IndexName: "test_index_1",
TypeParams: "",
IndexParams: "",
UserIndexParams: "",
IsAutoIndex: false,
CreateTime: uint64(1011),
IsDeleted: false,
},
}

// expectation
mock.ExpectQuery("SELECT indexes.field_id AS field_id, indexes.collection_id AS collection_id, indexes.index_id AS index_id, indexes.index_name AS index_name, indexes.index_params AS index_params, indexes.type_params AS type_params, indexes.is_deleted AS is_deleted, indexes.create_time AS create_time FROM `indexes` WHERE indexes.tenant_id = ?").
WithArgs(tenantID).
WillReturnRows(
sqlmock.NewRows([]string{"field_id", "collection_id", "index_id", "index_name", "index_params", "type_params", "user_index_params", "is_auto_index", "is_deleted", "create_time"}).
AddRow(indexResults[0].FieldID, indexResults[0].CollectionID, indexResults[0].IndexID, indexResults[0].IndexName, indexResults[0].IndexParams, indexResults[0].TypeParams, indexResults[0].UserIndexParams, indexResults[0].IsAutoIndex, indexResults[0].IsDeleted, indexResults[0].CreateTime))

// actual
res, err := indexTestDb.List(tenantID)
assert.NoError(t, err)
assert.Equal(t, indexResults, res)
}

func TestIndex_List_Error(t *testing.T) {
// expectation
mock.ExpectQuery("SELECT indexes.field_id AS field_id, indexes.collection_id AS collection_id, indexes.index_id AS index_id, indexes.index_name AS index_name, indexes.index_params AS index_params, indexes.type_params AS type_params, indexes.is_deleted AS is_deleted, indexes.create_time AS create_time FROM `indexes` WHERE indexes.tenant_id = ?").
WithArgs(tenantID).
WillReturnError(errors.New("test error"))

// actual
res, err := indexTestDb.List(tenantID)
assert.Nil(t, res)
assert.Error(t, err)
}

func TestIndex_Insert(t *testing.T) {
var indexes = []*dbmodel.Index{
{
TenantID: tenantID,
FieldID: fieldID1,
CollectionID: collID1,
IndexID: indexID1,
IndexName: "test_index_1",
IndexParams: "",
TypeParams: "",
UserIndexParams: "",
IsAutoIndex: false,
CreateTime: uint64(1011),
IsDeleted: false,
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
}

// expectation
mock.ExpectBegin()
mock.ExpectExec("INSERT INTO `indexes` (`tenant_id`,`field_id`,`collection_id`,`index_id`,`index_name`,`index_params`,`type_params`,`user_index_params`,`is_auto_index`,`create_time`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)").
WithArgs(indexes[0].TenantID, indexes[0].FieldID, indexes[0].CollectionID, indexes[0].IndexID, indexes[0].IndexName, indexes[0].IndexParams, indexes[0].TypeParams, indexes[0].UserIndexParams, indexes[0].IsAutoIndex, indexes[0].CreateTime, indexes[0].IsDeleted, indexes[0].CreatedAt, indexes[0].UpdatedAt).
WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectCommit()

// actual
err := indexTestDb.Insert(indexes)
assert.NoError(t, err)
}

func TestIndex_Insert_Error(t *testing.T) {
var indexes = []*dbmodel.Index{
{
TenantID: tenantID,
FieldID: fieldID1,
CollectionID: collID1,
IndexID: indexID1,
IndexName: "test_index_1",
IndexParams: "",
CreateTime: uint64(1011),
IsDeleted: false,
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
}

// expectation
mock.ExpectBegin()
mock.ExpectExec("INSERT INTO `indexes` (`tenant_id`,`field_id`,`collection_id`,`index_id`,`index_name`,`index_params`,`type_params`, `user_index_params`, `is_auto_index`, `create_time`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)").
WithArgs(indexes[0].TenantID, indexes[0].FieldID, indexes[0].CollectionID, indexes[0].IndexID, indexes[0].IndexName, indexes[0].IndexParams, indexes[0].TypeParams, indexes[0].UserIndexParams, indexes[0].IsAutoIndex, indexes[0].CreateTime, indexes[0].IsDeleted, indexes[0].CreatedAt, indexes[0].UpdatedAt).
WillReturnError(errors.New("test error"))
mock.ExpectRollback()

// actual
err := indexTestDb.Insert(indexes)
assert.Error(t, err)
}

func TestIndex_Update(t *testing.T) {
var index = &dbmodel.Index{
TenantID: tenantID,
IndexName: "test_index_name_1",
IndexID: indexID1,
IndexParams: "",
IsDeleted: true,
CreateTime: uint64(1112),
}

// expectation
mock.ExpectBegin()
mock.ExpectExec("UPDATE `indexes` SET `create_time`=?,`is_deleted`=?,`updated_at`=? WHERE tenant_id = ? AND collection_id = ? AND index_id = ?").
WithArgs(index.CreateTime, index.IsDeleted, AnyTime{}, index.TenantID, index.CollectionID, index.IndexID).
WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectCommit()

// actual
err := indexTestDb.Update(index)
assert.NoError(t, err)
}

func TestIndex_Update_Error(t *testing.T) {
var index = &dbmodel.Index{
TenantID: tenantID,
IndexName: "test_index_name_1",
IndexID: indexID1,
IndexParams: "",
IsDeleted: false,
CreateTime: uint64(1112),
}

// expectation
mock.ExpectBegin()
mock.ExpectExec("UPDATE `indexes` SET `create_time`=?,`updated_at`=? WHERE tenant_id = ? AND collection_id = ? AND index_id = ?").
WithArgs(index.CreateTime, AnyTime{}, index.TenantID, index.CollectionID, index.IndexID).
WillReturnError(errors.New("test error"))
mock.ExpectRollback()

// actual
err := indexTestDb.Update(index)
assert.Error(t, err)
}

func TestIndex_MarkDeletedByCollID(t *testing.T) {
// expectation
mock.ExpectBegin()
mock.ExpectExec("UPDATE `indexes` SET `is_deleted`=?,`updated_at`=? WHERE tenant_id = ? AND collection_id = ?").
WithArgs(true, AnyTime{}, tenantID, collID1).
WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectCommit()

// actual
err := indexTestDb.MarkDeletedByCollectionID(tenantID, collID1)
assert.NoError(t, err)
}

func TestIndex_MarkDeletedByCollID_Error(t *testing.T) {
// expectation
mock.ExpectBegin()
mock.ExpectExec("UPDATE `indexes` SET `is_deleted`=?,`updated_at`=? WHERE tenant_id = ? AND collection_id = ?").
WithArgs(true, AnyTime{}, tenantID, collID1).
WillReturnError(errors.New("test error"))
mock.ExpectRollback()

// actual
err := indexTestDb.MarkDeletedByCollectionID(tenantID, collID1)
assert.Error(t, err)
}

func TestIndex_MarkDeletedByIdxID(t *testing.T) {
// expectation
mock.ExpectBegin()
mock.ExpectExec("UPDATE `indexes` SET `is_deleted`=?,`updated_at`=? WHERE tenant_id = ? AND index_id = ?").
WithArgs(true, AnyTime{}, tenantID, indexID1).
WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectCommit()

// actual
err := indexTestDb.MarkDeletedByIndexID(tenantID, indexID1)
assert.NoError(t, err)
}

func TestIndex_MarkDeletedByIdxID_Error(t *testing.T) {
// expectation
mock.ExpectBegin()
mock.ExpectExec("UPDATE `indexes` SET `is_deleted`=?,`updated_at`=? WHERE tenant_id = ? AND index_id = ?").
WithArgs(true, AnyTime{}, tenantID, indexID1).
WillReturnError(errors.New("test error"))
mock.ExpectRollback()

// actual
err := indexTestDb.MarkDeletedByIndexID(tenantID, indexID1)
assert.Error(t, err)
}
@ -1,57 +0,0 @@
package dao

import (
"go.uber.org/zap"
"gorm.io/gorm"

"github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
"github.com/milvus-io/milvus/pkg/log"
"github.com/milvus-io/milvus/pkg/util/typeutil"
)

type partitionDb struct {
db *gorm.DB
}

func (s *partitionDb) GetByCollectionID(tenantID string, collectionID typeutil.UniqueID, ts typeutil.Timestamp) ([]*dbmodel.Partition, error) {
var r []*dbmodel.Partition

err := s.db.Model(&dbmodel.Partition{}).Where("tenant_id = ? AND collection_id = ? AND ts = ? AND is_deleted = false", tenantID, collectionID, ts).Find(&r).Error
if err != nil {
log.Error("get partitions by collection_id and ts failed", zap.String("tenant", tenantID), zap.Int64("collectionID", collectionID), zap.Uint64("ts", ts), zap.Error(err))
return nil, err
}

return r, nil
}

func (s *partitionDb) Insert(in []*dbmodel.Partition) error {
err := s.db.CreateInBatches(in, 100).Error
if err != nil {
log.Error("insert partition failed", zap.Error(err))
return err
}

return nil
}

func generatePartitionUpdatesWithoutID(in *dbmodel.Partition) map[string]interface{} {
ret := map[string]interface{}{
"tenant_id": in.TenantID,
"partition_id": in.PartitionID,
"partition_name": in.PartitionName,
"partition_created_timestamp": in.PartitionCreatedTimestamp,
"collection_id": in.CollectionID,
"status": in.Status,
"ts": in.Ts,
"is_deleted": in.IsDeleted,
"created_at": in.CreatedAt,
"updated_at": in.UpdatedAt,
}
return ret
}

func (s *partitionDb) Update(in *dbmodel.Partition) error {
updates := generatePartitionUpdatesWithoutID(in)
return s.db.Model(&dbmodel.Partition{}).Where("id = ?", in.ID).Updates(updates).Error
}
@ -1,155 +0,0 @@
package dao

import (
"testing"
"time"

"github.com/cockroachdb/errors"

"github.com/DATA-DOG/go-sqlmock"
"github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
"github.com/milvus-io/milvus/pkg/util/typeutil"
"github.com/stretchr/testify/assert"
)

func TestPartition_GetByCollID(t *testing.T) {
var partitions = []*dbmodel.Partition{
{
TenantID: tenantID,
PartitionID: fieldID1,
PartitionName: "test_field_1",
PartitionCreatedTimestamp: typeutil.Timestamp(1000),
CollectionID: collID1,
Ts: ts,
},
}

// expectation
mock.ExpectQuery("SELECT * FROM `partitions` WHERE tenant_id = ? AND collection_id = ? AND ts = ? AND is_deleted = false").
WithArgs(tenantID, collID1, ts).
WillReturnRows(
sqlmock.NewRows([]string{"tenant_id", "partition_id", "partition_name", "partition_created_timestamp", "collection_id", "ts"}).
AddRow(partitions[0].TenantID, partitions[0].PartitionID, partitions[0].PartitionName, partitions[0].PartitionCreatedTimestamp, partitions[0].CollectionID, partitions[0].Ts))

// actual
res, err := partitionTestDb.GetByCollectionID(tenantID, collID1, ts)
assert.NoError(t, err)
assert.Equal(t, partitions, res)
}

func TestPartition_GetByCollID_Error(t *testing.T) {
// expectation
mock.ExpectQuery("SELECT * FROM `partitions` WHERE tenant_id = ? AND collection_id = ? AND ts = ? AND is_deleted = false").
WithArgs(tenantID, collID1, ts).
WillReturnError(errors.New("test error"))

// actual
res, err := partitionTestDb.GetByCollectionID(tenantID, collID1, ts)
assert.Nil(t, res)
assert.Error(t, err)
}

func TestPartition_Insert(t *testing.T) {
var partitions = []*dbmodel.Partition{
{
TenantID: tenantID,
PartitionID: fieldID1,
PartitionName: "test_field_1",
PartitionCreatedTimestamp: typeutil.Timestamp(1000),
CollectionID: collID1,
Ts: ts,
IsDeleted: false,
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
}

// expectation
mock.ExpectBegin()
mock.ExpectExec("INSERT INTO `partitions` (`tenant_id`,`partition_id`,`partition_name`,`partition_created_timestamp`,`collection_id`,`status`,`ts`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?)").
WithArgs(partitions[0].TenantID, partitions[0].PartitionID, partitions[0].PartitionName, partitions[0].PartitionCreatedTimestamp, partitions[0].CollectionID, partitions[0].Status, partitions[0].Ts, partitions[0].IsDeleted, partitions[0].CreatedAt, partitions[0].UpdatedAt).
WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectCommit()

// actual
err := partitionTestDb.Insert(partitions)
assert.NoError(t, err)
}

func TestPartition_Insert_Error(t *testing.T) {
var partitions = []*dbmodel.Partition{
{
TenantID: tenantID,
PartitionID: fieldID1,
PartitionName: "test_field_1",
PartitionCreatedTimestamp: typeutil.Timestamp(1000),
CollectionID: collID1,
Ts: ts,
IsDeleted: false,
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
}

// expectation
mock.ExpectBegin()
mock.ExpectExec("INSERT INTO `partitions` (`tenant_id`,`partition_id`,`partition_name`,`partition_created_timestamp`,`collection_id`,`ts`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?)").
WithArgs(partitions[0].TenantID, partitions[0].PartitionID, partitions[0].PartitionName, partitions[0].PartitionCreatedTimestamp, partitions[0].CollectionID, partitions[0].Ts, partitions[0].IsDeleted, partitions[0].CreatedAt, partitions[0].UpdatedAt).
WillReturnError(errors.New("test error"))
mock.ExpectRollback()

// actual
err := partitionTestDb.Insert(partitions)
assert.Error(t, err)
}

func Test_partitionDb_Update(t *testing.T) {
t.Run("normal case", func(t *testing.T) {
partition := &dbmodel.Partition{
ID: 100,
TenantID: tenantID,
PartitionID: fieldID1,
PartitionName: "test_field_1",
PartitionCreatedTimestamp: typeutil.Timestamp(1000),
CollectionID: collID1,
Ts: ts,
IsDeleted: false,
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
}

mock.ExpectBegin()
mock.ExpectExec("UPDATE `partitions` SET `collection_id`=?,`created_at`=?,`is_deleted`=?,`partition_created_timestamp`=?,`partition_id`=?,`partition_name`=?,`status`=?,`tenant_id`=?,`ts`=?,`updated_at`=? WHERE id = ?").
WithArgs(partition.CollectionID, partition.CreatedAt, partition.IsDeleted, partition.PartitionCreatedTimestamp, partition.PartitionID, partition.PartitionName, partition.Status, partition.TenantID, partition.Ts, partition.UpdatedAt, partition.ID).
WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectCommit()

err := partitionTestDb.Update(partition)
assert.NoError(t, err)
})

t.Run("error case", func(t *testing.T) {
partition := &dbmodel.Partition{
ID: 100,
TenantID: tenantID,
PartitionID: fieldID1,
PartitionName: "test_field_1",
PartitionCreatedTimestamp: typeutil.Timestamp(1000),
CollectionID: collID1,
Ts: ts,
IsDeleted: false,
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
}

mock.ExpectBegin()
mock.ExpectExec("UPDATE `partitions` SET `collection_id`=?,`created_at`=?,`is_deleted`=?,`partition_created_timestamp`=?,`partition_id`=?,`partition_name`=?,`status`=?,`tenant_id`=?,`ts`=?,`updated_at`=? WHERE id = ?").
WithArgs(partition.CollectionID, partition.CreatedAt, partition.IsDeleted, partition.PartitionCreatedTimestamp, partition.PartitionID, partition.PartitionName, partition.Status, partition.TenantID, partition.Ts, partition.UpdatedAt, partition.ID).
WillReturnError(errors.New("error mock Update Partition"))
mock.ExpectRollback()

err := partitionTestDb.Update(partition)
assert.Error(t, err)
})

}
@ -1,48 +0,0 @@
package dao

import (
"go.uber.org/zap"
"gorm.io/gorm"

"github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
"github.com/milvus-io/milvus/pkg/log"
)

type roleDb struct {
db *gorm.DB
}

func (r *roleDb) GetRoles(tenantID string, name string) ([]*dbmodel.Role, error) {
var (
roles []*dbmodel.Role
err error
)
err = r.db.Model(&dbmodel.Role{}).
Where(&dbmodel.Role{Name: name}).
Where(dbmodel.GetCommonCondition(tenantID, false)).
Find(&roles).Error
if err != nil {
log.Error("fail to get roles", zap.String("tenant_id", tenantID), zap.String("name", name), zap.Error(err))
return nil, err
}
return roles, nil
}

func (r *roleDb) Insert(in *dbmodel.Role) error {
err := r.db.Create(in).Error
if err != nil {
log.Error("fail to insert the role", zap.Any("in", in), zap.Error(err))
}
return err
}

func (r *roleDb) Delete(tenantID string, name string) error {
err := r.db.Model(dbmodel.Role{}).
Where(&dbmodel.Role{Name: name}).
Where(dbmodel.GetCommonCondition(tenantID, false)).
Update("is_deleted", true).Error
if err != nil {
log.Error("fail to delete the role", zap.String("tenant_id", tenantID), zap.String("name", name), zap.Error(err))
}
return err
}
@ -1,133 +0,0 @@
package dao

import (
"testing"

"github.com/cockroachdb/errors"

"github.com/stretchr/testify/assert"

"github.com/milvus-io/milvus/internal/metastore/db/dbmodel"

"github.com/DATA-DOG/go-sqlmock"
)

func TestRole_GetRoles(t *testing.T) {
var (
roles []*dbmodel.Role
err error
)

mock.ExpectQuery("SELECT * FROM `role` WHERE `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(false, tenantID).
WillReturnRows(
sqlmock.NewRows([]string{"tenant_id", "name"}).
AddRow(tenantID, "foo1").
AddRow(tenantID, "foo2"))

roles, err = roleTestDb.GetRoles(tenantID, "")
assert.NoError(t, err)
assert.Equal(t, 2, len(roles))
}

func TestRole_GetRoles_Error(t *testing.T) {
mock.ExpectQuery("SELECT * FROM `role` WHERE `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(false, tenantID).
WillReturnError(errors.New("test error"))
_, err := roleTestDb.GetRoles(tenantID, "")
assert.Error(t, err)
}

func TestRole_GetRoles_WithRoleName(t *testing.T) {
var (
roleName = "foo1"
roles []*dbmodel.Role
err error
)
mock.ExpectQuery("SELECT * FROM `role` WHERE `role`.`name` = ? AND `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(roleName, false, tenantID).
WillReturnRows(
sqlmock.NewRows([]string{"tenant_id", "name"}).
AddRow(tenantID, roleName))
roles, err = roleTestDb.GetRoles(tenantID, roleName)
assert.NoError(t, err)
assert.Equal(t, 1, len(roles))
assert.Equal(t, roleName, roles[0].Name)
}

func TestRole_Insert(t *testing.T) {
var (
role *dbmodel.Role
err error
)
role = &dbmodel.Role{
Base: GetBase(),
Name: "foo",
}

mock.ExpectBegin()
mock.ExpectExec("INSERT INTO `role` (`tenant_id`,`is_deleted`,`created_at`,`updated_at`,`name`) VALUES (?,?,?,?,?)").
WithArgs(role.TenantID, role.IsDeleted, role.CreatedAt, role.UpdatedAt, role.Name).
WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectCommit()

err = roleTestDb.Insert(role)
assert.NoError(t, err)

}

func TestRole_Insert_Error(t *testing.T) {
var (
role *dbmodel.Role
err error
)
role = &dbmodel.Role{
Base: GetBase(),
Name: "foo",
}
mock.ExpectBegin()
mock.ExpectExec("INSERT INTO `role` (`tenant_id`,`is_deleted`,`created_at`,`updated_at`,`name`) VALUES (?,?,?,?,?)").
WithArgs(role.TenantID, role.IsDeleted, role.CreatedAt, role.UpdatedAt, role.Name).
WillReturnError(errors.New("test error"))
mock.ExpectRollback()
err = roleTestDb.Insert(role)
assert.Error(t, err)
}

func TestRole_Delete(t *testing.T) {
var (
role *dbmodel.Role
err error
)
role = &dbmodel.Role{
Base: GetBase(),
Name: "foo",
}
mock.ExpectBegin()
mock.ExpectExec("UPDATE `role` SET `is_deleted`=?,`updated_at`=? WHERE `role`.`name` = ? AND `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(true, AnyTime{}, role.Name, role.IsDeleted, role.TenantID).
WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectCommit()

err = roleTestDb.Delete(role.TenantID, role.Name)
assert.NoError(t, err)
}

func TestRole_Delete_Error(t *testing.T) {
var (
role *dbmodel.Role
err error
)
role = &dbmodel.Role{
Base: GetBase(),
Name: "foo",
}
mock.ExpectBegin()
mock.ExpectExec("UPDATE `role` SET `is_deleted`=?,`updated_at`=? WHERE `role`.`name` = ? AND `is_deleted` = ? AND `tenant_id` = ?").
WithArgs(true, AnyTime{}, role.Name, role.IsDeleted, role.TenantID).
WillReturnError(errors.New("test error"))
mock.ExpectRollback()

err = roleTestDb.Delete(role.TenantID, role.Name)
assert.Error(t, err)
}
@ -1,125 +0,0 @@
package dao

import (
    "go.uber.org/zap"
    "gorm.io/gorm"
    "gorm.io/gorm/clause"

    "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    "github.com/milvus-io/milvus/pkg/log"
    "github.com/milvus-io/milvus/pkg/util/typeutil"
)

type segmentIndexDb struct {
    db *gorm.DB
}

func (s *segmentIndexDb) Get(tenantID string, collectionID, buildID typeutil.UniqueID) ([]*dbmodel.SegmentIndexResult, error) {
    var r []*dbmodel.SegmentIndexResult

    err := s.db.Model(&dbmodel.SegmentIndex{}).Where("tenant_id = ? AND collection_id = ? AND build_id = ?", tenantID, collectionID, buildID).Find(&r).Error
    if err != nil {
        log.Error("get indexes by collection_id failed", zap.String("tenant", tenantID), zap.Int64("collectionID", collectionID), zap.Error(err))
        return nil, err
    }

    return r, nil
}

func (s *segmentIndexDb) List(tenantID string) ([]*dbmodel.SegmentIndexResult, error) {
    tx := s.db.Table("segment_indexes").
        Select("segment_indexes.collection_id AS collection_id, segment_indexes.partition_id AS partition_id, "+
            "segment_indexes.segment_id AS segment_id, segment_indexes.num_rows AS num_rows, segment_indexes.index_id AS index_id, "+
            "segment_indexes.build_id AS build_id, segment_indexes.node_id AS node_id, segment_indexes.index_version AS index_version, "+
            "segment_indexes.index_state AS index_state,segment_indexes.fail_reason AS fail_reason, segment_indexes.create_time AS create_time,"+
            "segment_indexes.index_file_keys AS index_file_keys, segment_indexes.index_size AS index_size, segment_indexes.is_deleted AS is_deleted").
        Where("indexes.tenant_id = ?", tenantID)

    var rs []*dbmodel.SegmentIndexResult
    err := tx.Scan(&rs).Error
    if err != nil {
        log.Error("list indexes by join failed", zap.String("tenant", tenantID), zap.Error(err))
        return nil, err
    }

    return rs, nil
}

func (s *segmentIndexDb) Insert(in []*dbmodel.SegmentIndex) error {
    err := s.db.CreateInBatches(in, 100).Error
    if err != nil {
        log.Error("insert segment_indexes failed", zap.Error(err))
        return err
    }

    return nil
}

func (s *segmentIndexDb) Update(in *dbmodel.SegmentIndex) error {
    err := s.db.CreateInBatches(in, 100).Error
    if err != nil {
        log.Error("insert segment_indexes failed", zap.Error(err))
        return err
    }

    return nil
}

func (s *segmentIndexDb) Upsert(in []*dbmodel.SegmentIndex) error {
    err := s.db.Clauses(clause.OnConflict{
        // constraint UNIQUE (tenant_id, segment_id, index_id)
        DoUpdates: clause.AssignmentColumns([]string{"index_build_id", "enable_index", "create_time"}),
    }).CreateInBatches(in, 100).Error

    if err != nil {
        log.Error("upsert segment_indexes failed", zap.Error(err))
        return err
    }

    return nil
}

func (s *segmentIndexDb) MarkDeleted(tenantID string, segIndexes []*dbmodel.SegmentIndex) error {
    inValues := make([][]interface{}, 0, len(segIndexes))
    for _, segIdx := range segIndexes {
        in := []interface{}{segIdx.SegmentID, segIdx.IndexID}
        inValues = append(inValues, in)
    }

    err := s.db.Model(&dbmodel.SegmentIndex{}).Where("tenant_id = ? AND (segment_id, index_id) IN ?", tenantID, inValues).Updates(dbmodel.SegmentIndex{
        IsDeleted: true,
    }).Error

    if err != nil {
        log.Error("update segment_indexes deleted failed", zap.String("tenant", tenantID), zap.Any("segmentIDIndexID", inValues), zap.Error(err))
        return err
    }

    return nil
}

func (s *segmentIndexDb) MarkDeletedByCollectionID(tenantID string, collID typeutil.UniqueID) error {
    err := s.db.Model(&dbmodel.SegmentIndex{}).Where("tenant_id = ? AND collection_id = ?", tenantID, collID).Updates(dbmodel.SegmentIndex{
        IsDeleted: true,
    }).Error

    if err != nil {
        log.Error("update segment_indexes deleted by collection id failed", zap.String("tenant", tenantID), zap.Int64("collectionID", collID), zap.Error(err))
        return err
    }

    return nil
}

func (s *segmentIndexDb) MarkDeletedByBuildID(tenantID string, buildID typeutil.UniqueID) error {
    err := s.db.Model(&dbmodel.SegmentIndex{}).Where("tenant_id = ? AND build_id = ?", tenantID, buildID).Updates(dbmodel.SegmentIndex{
        IsDeleted: true,
    }).Error

    if err != nil {
        log.Error("update segment_indexes deleted by index id failed", zap.String("tenant", tenantID), zap.Int64("buildID", buildID), zap.Error(err))
        return err
    }

    return nil
}
@ -1,248 +0,0 @@
package dao

import (
    "testing"
    "time"

    "github.com/cockroachdb/errors"

    "github.com/DATA-DOG/go-sqlmock"
    "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    "github.com/stretchr/testify/assert"
)

func TestSegmentIndex_Insert(t *testing.T) {
    var segIndexes = []*dbmodel.SegmentIndex{
        {
            TenantID:      tenantID,
            CollectionID:  collID1,
            PartitionID:   partitionID1,
            SegmentID:     segmentID1,
            NumRows:       NumRows,
            IndexID:       indexID1,
            BuildID:       1002,
            NodeID:        3,
            IndexVersion:  0,
            IndexState:    3,
            FailReason:    "",
            CreateTime:    uint64(1011),
            IndexFileKeys: "",
            IndexSize:     1024,
            IsDeleted:     false,
            CreatedAt:     time.Now(),
            UpdatedAt:     time.Now(),
        },
    }

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("INSERT INTO `segment_indexes` (`tenant_id`,`collection_id`,`partition_id`,`segment_id`,`num_rows`,`index_id`,`build_id`,`node_id`,`index_version`,`index_state`,`fail_reason`,`create_time`,`index_file_keys`,`index_size`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)").
        WithArgs(segIndexes[0].TenantID, segIndexes[0].CollectionID, segIndexes[0].PartitionID, segIndexes[0].SegmentID, segIndexes[0].NumRows, segIndexes[0].IndexID, segIndexes[0].BuildID, segIndexes[0].NodeID, segIndexes[0].IndexVersion, segIndexes[0].IndexState, segIndexes[0].FailReason, segIndexes[0].CreateTime, segIndexes[0].IndexFileKeys, segIndexes[0].IndexSize, segIndexes[0].IsDeleted, segIndexes[0].CreatedAt, segIndexes[0].UpdatedAt).
        WillReturnResult(sqlmock.NewResult(1, 1))
    mock.ExpectCommit()

    // actual
    err := segIndexTestDb.Insert(segIndexes)
    assert.NoError(t, err)
}

func TestSegmentIndex_Insert_Error(t *testing.T) {
    var segIndexes = []*dbmodel.SegmentIndex{
        {
            TenantID:      tenantID,
            CollectionID:  collID1,
            PartitionID:   partitionID1,
            SegmentID:     segmentID1,
            NumRows:       NumRows,
            IndexID:       indexID1,
            BuildID:       1002,
            NodeID:        3,
            IndexVersion:  0,
            IndexState:    3,
            FailReason:    "",
            CreateTime:    uint64(1011),
            IndexFileKeys: "",
            IndexSize:     1024,
            IsDeleted:     false,
            CreatedAt:     time.Now(),
            UpdatedAt:     time.Now(),
        },
    }

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("INSERT INTO `segment_indexes` (`tenant_id`,`collection_id`,`partition_id`,`segment_id`,`num_rows`,`index_id`,`build_id`,`node_id`,`index_version`,`index_state`,`fail_reason`,`create_time`,`index_file_keys`,`index_size`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)").
        WithArgs(segIndexes[0].TenantID, segIndexes[0].CollectionID, segIndexes[0].PartitionID, segIndexes[0].SegmentID, segIndexes[0].NumRows, segIndexes[0].IndexID, segIndexes[0].BuildID, segIndexes[0].NodeID, segIndexes[0].IndexVersion, segIndexes[0].IndexState, segIndexes[0].FailReason, segIndexes[0].CreateTime, segIndexes[0].IndexFileKeys, segIndexes[0].IndexSize, segIndexes[0].IsDeleted, segIndexes[0].CreatedAt, segIndexes[0].UpdatedAt).
        WillReturnError(errors.New("test error"))
    mock.ExpectRollback()

    // actual
    err := segIndexTestDb.Insert(segIndexes)
    assert.Error(t, err)
}

func TestSegmentIndex_Update(t *testing.T) {
    var segIndexes = []*dbmodel.SegmentIndex{
        {
            TenantID:      tenantID,
            CollectionID:  collID1,
            PartitionID:   partitionID1,
            SegmentID:     segmentID1,
            NumRows:       NumRows,
            IndexID:       indexID1,
            BuildID:       1002,
            NodeID:        3,
            IndexVersion:  0,
            IndexState:    3,
            FailReason:    "",
            CreateTime:    uint64(1011),
            IndexFileKeys: "",
            IndexSize:     1024,
            IsDeleted:     true,
            CreatedAt:     time.Now(),
            UpdatedAt:     time.Now(),
        },
    }

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("INSERT INTO `segment_indexes` (`tenant_id`,`collection_id`,`partition_id`,`segment_id`,`num_rows`,`index_id`,`build_id`,`node_id`,`index_version`,`index_state`,`fail_reason`,`create_time`,`index_file_keys`,`index_size`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)").
        WithArgs(segIndexes[0].TenantID, segIndexes[0].CollectionID, segIndexes[0].PartitionID, segIndexes[0].SegmentID, segIndexes[0].NumRows, segIndexes[0].IndexID, segIndexes[0].BuildID, segIndexes[0].NodeID, segIndexes[0].IndexVersion, segIndexes[0].IndexState, segIndexes[0].FailReason, segIndexes[0].CreateTime, segIndexes[0].IndexFileKeys, segIndexes[0].IndexSize, segIndexes[0].IsDeleted, segIndexes[0].CreatedAt, segIndexes[0].UpdatedAt).
        WillReturnResult(sqlmock.NewResult(1, 1))
    mock.ExpectCommit()

    // actual
    err := segIndexTestDb.Update(segIndexes[0])
    assert.NoError(t, err)
}

func TestSegmentIndex_Upsert_Error(t *testing.T) {
    var segIndexes = []*dbmodel.SegmentIndex{
        {
            TenantID:      tenantID,
            CollectionID:  collID1,
            PartitionID:   partitionID1,
            SegmentID:     segmentID1,
            NumRows:       NumRows,
            IndexID:       indexID1,
            BuildID:       1002,
            NodeID:        3,
            IndexVersion:  0,
            IndexState:    3,
            FailReason:    "",
            CreateTime:    uint64(1011),
            IndexFileKeys: "",
            IndexSize:     1024,
            IsDeleted:     true,
            CreatedAt:     time.Now(),
            UpdatedAt:     time.Now(),
        },
    }

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("INSERT INTO `segment_indexes` (`tenant_id`,`collection_id`,`partition_id`,`segment_id`,`num_rows`,`index_id`,`build_id`,`node_id`,`index_version`,`index_state`,`fail_reason`,`create_time`,`index_file_keys`,`index_size`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)").
        WithArgs(segIndexes[0].TenantID, segIndexes[0].CollectionID, segIndexes[0].PartitionID, segIndexes[0].SegmentID, segIndexes[0].NumRows, segIndexes[0].IndexID, segIndexes[0].BuildID, segIndexes[0].NodeID, segIndexes[0].IndexVersion, segIndexes[0].IndexState, segIndexes[0].FailReason, segIndexes[0].CreateTime, segIndexes[0].IndexFileKeys, segIndexes[0].IndexSize, segIndexes[0].IsDeleted, segIndexes[0].CreatedAt, segIndexes[0].UpdatedAt).
        WillReturnError(errors.New("test error"))
    mock.ExpectRollback()

    // actual
    err := segIndexTestDb.Update(segIndexes[0])
    assert.Error(t, err)
}

func TestSegmentIndex_MarkDeleted(t *testing.T) {
    var segIndexes = []*dbmodel.SegmentIndex{
        {
            SegmentID: segmentID1,
            IndexID:   indexID1,
        },
        {
            SegmentID: segmentID2,
            IndexID:   indexID2,
        },
    }

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("UPDATE `segment_indexes` SET `is_deleted`=?,`updated_at`=? WHERE tenant_id = ? AND (segment_id, index_id) IN ((?,?),(?,?))").
        WithArgs(true, AnyTime{}, tenantID, segIndexes[0].SegmentID, segIndexes[0].IndexID, segIndexes[1].SegmentID, segIndexes[1].IndexID).
        WillReturnResult(sqlmock.NewResult(1, 1))
    mock.ExpectCommit()

    // actual
    err := segIndexTestDb.MarkDeleted(tenantID, segIndexes)
    assert.NoError(t, err)
}

func TestSegmentIndex_MarkDeleted_Error(t *testing.T) {
    var segIndexes = []*dbmodel.SegmentIndex{
        {
            SegmentID: segmentID1,
            IndexID:   indexID1,
        },
    }

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("UPDATE `segment_indexes` SET `is_deleted`=?,`updated_at`=? WHERE tenant_id = ? AND (segment_id, index_id) IN ((?,?))").
        WithArgs(true, AnyTime{}, tenantID, segIndexes[0].SegmentID, segIndexes[0].IndexID).
        WillReturnError(errors.New("test error"))
    mock.ExpectRollback()

    // actual
    err := segIndexTestDb.MarkDeleted(tenantID, segIndexes)
    assert.Error(t, err)
}

func TestSegmentIndex_MarkDeletedByCollID(t *testing.T) {
    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("UPDATE `segment_indexes` SET `is_deleted`=?,`updated_at`=? WHERE tenant_id = ? AND collection_id = ?").
        WithArgs(true, AnyTime{}, tenantID, collID1).
        WillReturnResult(sqlmock.NewResult(1, 1))
    mock.ExpectCommit()

    // actual
    err := segIndexTestDb.MarkDeletedByCollectionID(tenantID, collID1)
    assert.NoError(t, err)
}

func TestSegmentIndex_MarkDeletedByCollID_Error(t *testing.T) {
    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("UPDATE `segment_indexes` SET `is_deleted`=?,`updated_at`=? WHERE tenant_id = ? AND collection_id = ?").
        WithArgs(true, AnyTime{}, tenantID, collID1).
        WillReturnError(errors.New("test error"))
    mock.ExpectRollback()

    // actual
    err := segIndexTestDb.MarkDeletedByCollectionID(tenantID, collID1)
    assert.Error(t, err)
}

func TestSegmentIndex_MarkDeletedByBuildID(t *testing.T) {
    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("UPDATE `segment_indexes` SET `is_deleted`=?,`updated_at`=? WHERE tenant_id = ? AND build_id = ?").
        WithArgs(true, AnyTime{}, tenantID, indexBuildID1).
        WillReturnResult(sqlmock.NewResult(1, 1))
    mock.ExpectCommit()

    // actual
    err := segIndexTestDb.MarkDeletedByBuildID(tenantID, indexBuildID1)
    assert.NoError(t, err)
}

func TestSegmentIndex_MarkDeletedByIdxID_Error(t *testing.T) {
    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("UPDATE `segment_indexes` SET `is_deleted`=?,`updated_at`=? WHERE tenant_id = ? AND build_id = ?").
        WithArgs(true, AnyTime{}, tenantID, indexBuildID1).
        WillReturnError(errors.New("test error"))
    mock.ExpectRollback()

    // actual
    err := segIndexTestDb.MarkDeletedByBuildID(tenantID, indexBuildID1)
    assert.Error(t, err)
}
@ -1,75 +0,0 @@
package dao

import (
    "fmt"

    "github.com/cockroachdb/errors"
    "go.uber.org/zap"
    "gorm.io/gorm"

    "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    "github.com/milvus-io/milvus/pkg/common"
    "github.com/milvus-io/milvus/pkg/log"
)

type userDb struct {
    db *gorm.DB
}

func (s *userDb) GetByUsername(tenantID string, username string) (*dbmodel.User, error) {
    var r *dbmodel.User

    err := s.db.Model(&dbmodel.User{}).Where("tenant_id = ? AND username = ? AND is_deleted = false", tenantID, username).Take(&r).Error

    if errors.Is(err, gorm.ErrRecordNotFound) {
        return nil, common.NewKeyNotExistError(fmt.Sprintf("%s/%s", tenantID, username))
    }
    if err != nil {
        log.Error("get user by username failed", zap.String("tenant", tenantID), zap.String("username", username), zap.Error(err))
        return nil, err
    }

    return r, nil
}

func (s *userDb) ListUser(tenantID string) ([]*dbmodel.User, error) {
    var users []*dbmodel.User

    err := s.db.Model(&dbmodel.User{}).Where("tenant_id = ? AND is_deleted = false", tenantID).Find(&users).Error
    if err != nil {
        log.Error("list user failed", zap.String("tenant", tenantID), zap.Error(err))
        return nil, err
    }

    return users, nil
}

func (s *userDb) Insert(in *dbmodel.User) error {
    err := s.db.Create(in).Error
    if err != nil {
        log.Error("insert credential_users failed", zap.String("tenant", in.TenantID), zap.String("username", in.Username), zap.Error(err))
        return err
    }

    return nil
}

func (s *userDb) MarkDeletedByUsername(tenantID string, username string) error {
    err := s.db.Model(&dbmodel.User{}).Where("tenant_id = ? AND username = ?", tenantID, username).Update("is_deleted", true).Error
    if err != nil {
        log.Error("update credential_users is_deleted=true failed", zap.String("tenant", tenantID), zap.String("username", username), zap.Error(err))
        return err
    }

    return nil
}

func (s *userDb) UpdatePassword(tenantID string, username string, encryptedPassword string) error {
    err := s.db.Model(&dbmodel.User{}).Where("tenant_id = ? AND username = ?", tenantID, username).Update("encrypted_password", encryptedPassword).Error
    if err != nil {
        log.Error("update password by username failed", zap.String("tenant", tenantID), zap.String("username", username), zap.Error(err))
        return err
    }

    return nil
}
@ -1,49 +0,0 @@
package dao

import (
    "go.uber.org/zap"
    "gorm.io/gorm"

    "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    "github.com/milvus-io/milvus/pkg/log"
)

type userRoleDb struct {
    db *gorm.DB
}

func (u *userRoleDb) GetUserRoles(tenantID string, userID int64, roleID int64) ([]*dbmodel.UserRole, error) {
    var (
        userRoles []*dbmodel.UserRole
        err       error
    )
    err = u.db.Model(&dbmodel.UserRole{}).
        Where(&dbmodel.UserRole{UserID: userID, RoleID: roleID}).
        Where(dbmodel.GetCommonCondition(tenantID, false)).
        Preload("User").Preload("Role").
        Find(&userRoles).Error
    if err != nil {
        log.Error("fail to get user-roles", zap.String("tenant_id", tenantID), zap.Int64("userID", userID), zap.Int64("roleID", roleID), zap.Error(err))
        return nil, err
    }
    return userRoles, nil
}

func (u *userRoleDb) Insert(in *dbmodel.UserRole) error {
    err := u.db.Create(in).Error
    if err != nil {
        log.Error("fail to insert the user-role", zap.Any("in", in), zap.Error(err))
    }
    return err
}

func (u *userRoleDb) Delete(tenantID string, userID int64, roleID int64) error {
    err := u.db.Model(dbmodel.UserRole{}).
        Where(&dbmodel.UserRole{UserID: userID, RoleID: roleID}).
        Where(dbmodel.GetCommonCondition(tenantID, false)).
        Update("is_deleted", true).Error
    if err != nil {
        log.Error("fail to delete the user-role", zap.String("tenant_id", tenantID), zap.Int64("userID", userID), zap.Int64("roleID", roleID), zap.Error(err))
    }
    return err
}
@ -1,201 +0,0 @@
package dao

import (
    "testing"

    "github.com/cockroachdb/errors"

    "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"

    "github.com/DATA-DOG/go-sqlmock"
    "github.com/stretchr/testify/assert"
)

func TestUserRole_GetUserRoles(t *testing.T) {
    var (
        userID1   = 1
        userID2   = 2
        roleID1   = 10
        roleID2   = 20
        userRoles []*dbmodel.UserRole
        err       error
    )

    // mock user and role
    mock.ExpectQuery("SELECT * FROM `user_role` WHERE `is_deleted` = ? AND `tenant_id` = ?").
        WithArgs(false, tenantID).
        WillReturnRows(
            sqlmock.NewRows([]string{"tenant_id", "user_id", "role_id"}).
                AddRow(tenantID, userID1, roleID1).
                AddRow(tenantID, userID2, roleID2))

    mock.ExpectQuery("SELECT * FROM `role` WHERE `role`.`id` IN (?,?)").
        WithArgs(roleID1, roleID2).
        WillReturnRows(
            sqlmock.NewRows([]string{"id", "tenant_id", "name"}).
                AddRow(roleID1, tenantID, "foo1").
                AddRow(roleID2, tenantID, "foo2"))

    mock.ExpectQuery("SELECT * FROM `credential_users` WHERE `credential_users`.`id` IN (?,?)").
        WithArgs(userID1, userID2).
        WillReturnRows(
            sqlmock.NewRows([]string{"id", "tenant_id", "username"}).
                AddRow(userID1, tenantID, "fo1").
                AddRow(userID2, tenantID, "fo2"))

    userRoles, err = userRoleTestDb.GetUserRoles(tenantID, 0, 0)
    mock.MatchExpectationsInOrder(false)
    assert.NoError(t, err)
    assert.Equal(t, 2, len(userRoles))
    assert.Equal(t, "foo1", userRoles[0].Role.Name)
    assert.Equal(t, "fo1", userRoles[0].User.Username)

    mock.ExpectQuery("SELECT * FROM `user_role` WHERE `is_deleted` = ? AND `tenant_id` = ?").
        WithArgs(false, tenantID).
        WillReturnError(errors.New("test error"))
    _, err = userRoleTestDb.GetUserRoles(tenantID, 0, 0)
    mock.MatchExpectationsInOrder(false)
    assert.Error(t, err)
}

func TestUserRole_GetUserRolesWithUserID(t *testing.T) {
    var (
        userID1   = 1
        roleID1   = 10
        roleID2   = 20
        userRoles []*dbmodel.UserRole
        err       error
    )

    mock.ExpectQuery("SELECT * FROM `user_role` WHERE `user_role`.`user_id` = ? AND `is_deleted` = ? AND `tenant_id` = ?").
        WithArgs(userID1, false, tenantID).
        WillReturnRows(
            sqlmock.NewRows([]string{"tenant_id", "user_id", "role_id"}).
                AddRow(tenantID, userID1, roleID1).
                AddRow(tenantID, userID1, roleID2))
    mock.ExpectQuery("SELECT * FROM `role` WHERE `role`.`id` IN (?,?)").
        WithArgs(roleID1, roleID2).
        WillReturnRows(
            sqlmock.NewRows([]string{"id", "tenant_id", "name"}).
                AddRow(roleID1, tenantID, "foo1").
                AddRow(roleID2, tenantID, "foo2"))
    mock.ExpectQuery("SELECT * FROM `credential_users` WHERE `credential_users`.`id` = ?").
        WithArgs(userID1).
        WillReturnRows(
            sqlmock.NewRows([]string{"id", "tenant_id", "username"}).
                AddRow(userID1, tenantID, "fo1"))

    userRoles, err = userRoleTestDb.GetUserRoles(tenantID, int64(userID1), 0)
    mock.MatchExpectationsInOrder(false)
    assert.NoError(t, err)
    assert.Equal(t, 2, len(userRoles))
    assert.Equal(t, "foo2", userRoles[1].Role.Name)
    assert.Equal(t, "fo1", userRoles[0].User.Username)
}

func TestUserRole_GetUserRolesWithRoleID(t *testing.T) {
    var (
        userID1   = 1
        userID2   = 2
        roleID1   = 10
        userRoles []*dbmodel.UserRole
        err       error
    )

    mock.ExpectQuery("SELECT * FROM `user_role` WHERE `user_role`.`role_id` = ? AND `is_deleted` = ? AND `tenant_id` = ?").
        WithArgs(roleID1, false, tenantID).
        WillReturnRows(
            sqlmock.NewRows([]string{"tenant_id", "user_id", "role_id"}).
                AddRow(tenantID, userID1, roleID1).
                AddRow(tenantID, userID2, roleID1))
    mock.ExpectQuery("SELECT * FROM `role` WHERE `role`.`id` = ?").
        WithArgs(roleID1).
        WillReturnRows(
            sqlmock.NewRows([]string{"id", "tenant_id", "name"}).
                AddRow(roleID1, tenantID, "foo1"))
    mock.ExpectQuery("SELECT * FROM `credential_users` WHERE `credential_users`.`id` IN (?,?)").
        WithArgs(userID1, userID2).
        WillReturnRows(
            sqlmock.NewRows([]string{"id", "tenant_id", "username"}).
                AddRow(userID1, tenantID, "fo1").
                AddRow(userID2, tenantID, "fo2"))

    userRoles, err = userRoleTestDb.GetUserRoles(tenantID, 0, int64(roleID1))
    mock.MatchExpectationsInOrder(false)
    assert.NoError(t, err)
    assert.Equal(t, 2, len(userRoles))
    assert.Equal(t, "foo1", userRoles[0].Role.Name)
    assert.Equal(t, "fo2", userRoles[1].User.Username)
}

func TestUserRole_Insert(t *testing.T) {
    var (
        userRole *dbmodel.UserRole
        err      error
    )
    userRole = &dbmodel.UserRole{
        Base:   GetBase(),
        UserID: 1,
        RoleID: 1,
    }

    mock.ExpectBegin()
    mock.ExpectExec("INSERT INTO `user_role` (`tenant_id`,`is_deleted`,`created_at`,`updated_at`,`user_id`,`role_id`) VALUES (?,?,?,?,?,?)").
        WithArgs(userRole.TenantID, userRole.IsDeleted, userRole.CreatedAt, userRole.UpdatedAt, userRole.UserID, userRole.RoleID).
        WillReturnResult(sqlmock.NewResult(1, 1))
    mock.ExpectCommit()
    err = userRoleTestDb.Insert(userRole)
    mock.MatchExpectationsInOrder(false)
    assert.NoError(t, err)
}

func TestUserRole_InsertError(t *testing.T) {
    var (
        userRole *dbmodel.UserRole
        err      error
    )
    userRole = &dbmodel.UserRole{
        Base:   GetBase(),
        UserID: 1,
        RoleID: 1,
    }

    mock.ExpectBegin()
    mock.ExpectExec("INSERT INTO `user_role` (`tenant_id`,`is_deleted`,`created_at`,`updated_at`,`user_id`,`role_id`) VALUES (?,?,?,?,?,?)").
        WithArgs(userRole.TenantID, userRole.IsDeleted, userRole.CreatedAt, userRole.UpdatedAt, userRole.UserID, userRole.RoleID).
        WillReturnError(errors.New("test error"))
    mock.ExpectRollback()
    err = userRoleTestDb.Insert(userRole)
    mock.MatchExpectationsInOrder(false)
    assert.Error(t, err)
}

func TestUserRole_Delete(t *testing.T) {
    var (
        userRole *dbmodel.UserRole
        getExec  func() *sqlmock.ExpectedExec
        err      error
    )
    userRole = &dbmodel.UserRole{
        Base:   GetBase(),
        UserID: 1,
        RoleID: 1,
    }
    getExec = func() *sqlmock.ExpectedExec {
        return mock.ExpectExec("UPDATE `user_role` SET `is_deleted`=?,`updated_at`=? WHERE `user_role`.`user_id` = ? AND `user_role`.`role_id` = ? AND `is_deleted` = ? AND `tenant_id` = ?").
            WithArgs(true, AnyTime{}, userRole.UserID, userRole.RoleID, userRole.IsDeleted, userRole.TenantID)
    }
    mock.ExpectBegin()
    getExec().WillReturnResult(sqlmock.NewResult(1, 1))
    mock.ExpectCommit()
    err = userRoleTestDb.Delete(userRole.TenantID, userRole.UserID, userRole.RoleID)
    mock.MatchExpectationsInOrder(false)
    assert.NoError(t, err)

    mock.ExpectBegin()
    getExec().WillReturnError(errors.New("test error"))
    mock.ExpectRollback()
    err = userRoleTestDb.Delete(userRole.TenantID, userRole.UserID, userRole.RoleID)
    mock.MatchExpectationsInOrder(false)
    assert.Error(t, err)
}
@ -1,212 +0,0 @@
package dao

import (
    "testing"
    "time"

    "github.com/cockroachdb/errors"

    "github.com/DATA-DOG/go-sqlmock"
    "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    "github.com/stretchr/testify/assert"
    "gorm.io/gorm"
)

func TestUser_GetByUsername(t *testing.T) {
    username := "test_username_1"
    var user = &dbmodel.User{
        TenantID:          tenantID,
        Username:          username,
        EncryptedPassword: "xxx",
        IsSuper:           false,
    }

    // expectation
    mock.ExpectQuery("SELECT * FROM `credential_users` WHERE tenant_id = ? AND username = ? AND is_deleted = false LIMIT 1").
        WithArgs(tenantID, username).
        WillReturnRows(
            sqlmock.NewRows([]string{"tenant_id", "username", "encrypted_password", "is_super"}).
                AddRow(user.TenantID, user.Username, user.EncryptedPassword, user.IsSuper))

    // actual
    res, err := userTestDb.GetByUsername(tenantID, username)
    assert.NoError(t, err)
    assert.Equal(t, user, res)
}

func TestUser_GetByUsername_ErrRecordNotFound(t *testing.T) {
    username := "test_username_1"

    // expectation
    mock.ExpectQuery("SELECT * FROM `credential_users` WHERE tenant_id = ? AND username = ? AND is_deleted = false LIMIT 1").
        WithArgs(tenantID, username).
        WillReturnError(gorm.ErrRecordNotFound)

    // actual
    res, err := userTestDb.GetByUsername(tenantID, username)
    assert.Nil(t, res)
    assert.Error(t, err)
}

func TestUser_GetByUsername_Error(t *testing.T) {
    username := "test_username_1"

    // expectation
    mock.ExpectQuery("SELECT * FROM `credential_users` WHERE tenant_id = ? AND username = ? AND is_deleted = false LIMIT 1").
        WithArgs(tenantID, username).
        WillReturnError(errors.New("test error"))

    // actual
    res, err := userTestDb.GetByUsername(tenantID, username)
    assert.Nil(t, res)
    assert.Error(t, err)
}

func TestUser_ListUsername(t *testing.T) {
    var (
        usernames = []string{
            "test_username_1",
            "test_username_2",
        }
        user = &dbmodel.User{
            TenantID:          tenantID,
            EncryptedPassword: "xxx",
            IsSuper:           false,
        }
    )

    // expectation
    mock.ExpectQuery("SELECT * FROM `credential_users` WHERE tenant_id = ? AND is_deleted = false").
        WithArgs(tenantID).
        WillReturnRows(
            sqlmock.NewRows([]string{"tenant_id", "username", "encrypted_password", "is_super"}).
                AddRow(user.TenantID, usernames[0], user.EncryptedPassword, user.IsSuper).
                AddRow(user.TenantID, usernames[1], user.EncryptedPassword, user.IsSuper))

    // actual
    res, err := userTestDb.ListUser(tenantID)
    assert.NoError(t, err)
    assert.Equal(t, 2, len(res))
    assert.Equal(t, usernames[0], res[0].Username)
    assert.Equal(t, usernames[1], res[1].Username)
}

func TestUser_ListUsername_Error(t *testing.T) {
    // expectation
    mock.ExpectQuery("SELECT * FROM `credential_users` WHERE tenant_id = ? AND is_deleted = false").
        WithArgs(tenantID).
        WillReturnError(errors.New("test error"))

    // actual
    res, err := userTestDb.ListUser(tenantID)
    assert.Nil(t, res)
    assert.Error(t, err)
}

func TestUser_Insert(t *testing.T) {
    var user = &dbmodel.User{
        TenantID:          tenantID,
        Username:          "test_username",
        EncryptedPassword: "xxx",
        IsSuper:           false,
        IsDeleted:         false,
        CreatedAt:         time.Now(),
        UpdatedAt:         time.Now(),
    }

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("INSERT INTO `credential_users` (`tenant_id`,`username`,`encrypted_password`,`is_super`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?)").
        WithArgs(user.TenantID, user.Username, user.EncryptedPassword, user.IsSuper, user.IsDeleted, user.CreatedAt, user.UpdatedAt).
        WillReturnResult(sqlmock.NewResult(1, 1))
    mock.ExpectCommit()

    // actual
    err := userTestDb.Insert(user)
    assert.NoError(t, err)
}

func TestUser_Insert_Error(t *testing.T) {
    var user = &dbmodel.User{
        TenantID:          tenantID,
        Username:          "test_username",
        EncryptedPassword: "xxx",
        IsSuper:           false,
        IsDeleted:         false,
        CreatedAt:         time.Now(),
        UpdatedAt:         time.Now(),
    }

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("INSERT INTO `credential_users` (`tenant_id`,`username`,`encrypted_password`,`is_super`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?)").
        WithArgs(user.TenantID, user.Username, user.EncryptedPassword, user.IsSuper, user.IsDeleted, user.CreatedAt, user.UpdatedAt).
        WillReturnError(errors.New("test error"))
    mock.ExpectRollback()

    // actual
    err := userTestDb.Insert(user)
    assert.Error(t, err)
}

func TestUser_MarkDeletedByUsername(t *testing.T) {
    username := "test_username_1"

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("UPDATE `credential_users` SET `is_deleted`=?,`updated_at`=? WHERE tenant_id = ? AND username = ?").
        WithArgs(true, AnyTime{}, tenantID, username).
        WillReturnResult(sqlmock.NewResult(1, 1))
    mock.ExpectCommit()

    // actual
    err := userTestDb.MarkDeletedByUsername(tenantID, username)
    assert.NoError(t, err)
}

func TestUser_MarkDeletedByUsername_Error(t *testing.T) {
    username := "test_username_1"

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("UPDATE `credential_users` SET `is_deleted`=?,`updated_at`=? WHERE tenant_id = ? AND username = ?").
        WithArgs(true, AnyTime{}, tenantID, username).
        WillReturnError(errors.New("test error"))
    mock.ExpectRollback()

    // actual
    err := userTestDb.MarkDeletedByUsername(tenantID, username)
    assert.Error(t, err)
}

func TestUser_UpdatePassword(t *testing.T) {
    username := "test_username_1"
    encryptedPassword := "test_encrypted_password_1"

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("UPDATE `credential_users` SET `encrypted_password`=?,`updated_at`=? WHERE tenant_id = ? AND username = ?").
        WithArgs(encryptedPassword, AnyTime{}, tenantID, username).
        WillReturnResult(sqlmock.NewResult(1, 1))
    mock.ExpectCommit()

    // actual
    err := userTestDb.UpdatePassword(tenantID, username, encryptedPassword)
    assert.NoError(t, err)
}

func TestUser_UpdatePassword_Error(t *testing.T) {
    username := "test_username_1"
    encryptedPassword := "test_encrypted_password_1"

    // expectation
    mock.ExpectBegin()
    mock.ExpectExec("UPDATE `credential_users` SET `encrypted_password`=?,`updated_at`=? WHERE tenant_id = ? AND username = ?").
        WithArgs(encryptedPassword, AnyTime{}, tenantID, username).
        WillReturnError(errors.New("test error"))
    mock.ExpectRollback()

    // actual
    err := userTestDb.UpdatePassword(tenantID, username, encryptedPassword)
    assert.Error(t, err)
}
@ -1,119 +0,0 @@
package dbcore

import (
    "context"
    "fmt"
    "reflect"

    "go.uber.org/zap"
    "gorm.io/driver/mysql"
    "gorm.io/gorm"
    "gorm.io/gorm/logger"

    "github.com/milvus-io/milvus/pkg/log"
    "github.com/milvus-io/milvus/pkg/util/paramtable"
)

var (
    globalDB *gorm.DB
)

func Connect(cfg *paramtable.MetaDBConfig) error {
    // load config
    dsn := fmt.Sprintf("%s:%s@tcp(%s:%d)/%s?charset=utf8mb4&parseTime=True&loc=Local",
        cfg.Username.GetValue(), cfg.Password.GetValue(), cfg.Address.GetValue(), cfg.Port.GetAsInt(), cfg.DBName.GetValue())

    var ormLogger logger.Interface
    if log.Level().String() == "debug" {
        ormLogger = logger.Default.LogMode(logger.Info)
    } else {
        ormLogger = logger.Default
    }

    db, err := gorm.Open(mysql.Open(dsn), &gorm.Config{
        Logger:          ormLogger,
        CreateBatchSize: 100,
    })
    if err != nil {
        log.Error("fail to connect db",
            zap.String("host", cfg.Address.GetValue()),
            zap.Int("port", cfg.Port.GetAsInt()),
            zap.String("database", cfg.DBName.GetValue()),
            zap.Error(err))
        return err
    }

    idb, err := db.DB()
    if err != nil {
        log.Error("fail to create db instance",
            zap.String("host", cfg.Address.GetValue()),
            zap.Int("port", cfg.Port.GetAsInt()),
            zap.String("database", cfg.DBName.GetValue()),
            zap.Error(err))
        return err
    }
    idb.SetMaxIdleConns(cfg.MaxIdleConns.GetAsInt())
    idb.SetMaxOpenConns(cfg.MaxOpenConns.GetAsInt())

    globalDB = db

    log.Info("db connected success",
        zap.String("host", cfg.Address.GetValue()),
        zap.Int("port", cfg.Port.GetAsInt()),
        zap.String("database", cfg.DBName.GetValue()),
        zap.Error(err))

    return nil
}

// SetGlobalDB Only for test
func SetGlobalDB(db *gorm.DB) {
    globalDB = db
}

type ctxTransactionKey struct{}

func CtxWithTransaction(ctx context.Context, tx *gorm.DB) context.Context {
    if ctx == nil {
        ctx = context.Background()
    }
    return context.WithValue(ctx, ctxTransactionKey{}, tx)
}

type txImpl struct{}

func NewTxImpl() *txImpl {
    return &txImpl{}
}

func (*txImpl) Transaction(ctx context.Context, fn func(txctx context.Context) error) error {
    db := globalDB.WithContext(ctx)

    return db.Transaction(func(tx *gorm.DB) error {
        txCtx := CtxWithTransaction(ctx, tx)
        return fn(txCtx)
    })
}

func GetDB(ctx context.Context) *gorm.DB {
    iface := ctx.Value(ctxTransactionKey{})

    if iface != nil {
        tx, ok := iface.(*gorm.DB)
        if !ok {
            log.Error("unexpect context value type", zap.Any("type", reflect.TypeOf(tx)))
            return nil
        }

        return tx
    }

    return globalDB.WithContext(ctx)
}

//type CommonModel struct {
//	ID        string    `gorm:"primary_key"`
//	IsDeleted bool      `gorm:"is_deleted"`
//	CreatedAt time.Time `gorm:"created_at"`
//	UpdatedAt time.Time `gorm:"updated_at"`
//}
@ -1,11 +0,0 @@
package dbmodel

import "time"

type Base struct {
    ID        int64     `gorm:"id"`
    TenantID  string    `gorm:"tenant_id"`
    IsDeleted bool      `gorm:"is_deleted"`
    CreatedAt time.Time `gorm:"created_at"`
    UpdatedAt time.Time `gorm:"updated_at"`
}
@ -1,107 +0,0 @@
package dbmodel

import (
    "encoding/json"
    "fmt"
    "time"

    "go.uber.org/zap"

    "github.com/milvus-io/milvus-proto/go-api/v2/commonpb"
    "github.com/milvus-io/milvus/internal/metastore/model"
    "github.com/milvus-io/milvus/pkg/log"
    "github.com/milvus-io/milvus/pkg/util/typeutil"
)

type Collection struct {
    ID               int64              `gorm:"id"`
    TenantID         string             `gorm:"tenant_id"`
    CollectionID     int64              `gorm:"collection_id"`
    CollectionName   string             `gorm:"collection_name"`
    Description      string             `gorm:"description"`
    AutoID           bool               `gorm:"auto_id"`
    ShardsNum        int32              `gorm:"shards_num"`
    StartPosition    string             `gorm:"start_position"`
    ConsistencyLevel int32              `gorm:"consistency_level"`
    Status           int32              `gorm:"status"`
    Properties       string             `gorm:"properties"`
    Ts               typeutil.Timestamp `gorm:"ts"`
    IsDeleted        bool               `gorm:"is_deleted"`
    CreatedAt        time.Time          `gorm:"created_at"`
    UpdatedAt        time.Time          `gorm:"updated_at"`
}

func (v Collection) TableName() string {
    return "collections"
}

//go:generate mockery --name=ICollectionDb
type ICollectionDb interface {
    // GetCollectionIdTs get the largest timestamp that less than or equal to param ts, no matter is_deleted is true or false.
    GetCollectionIDTs(tenantID string, collectionID typeutil.UniqueID, ts typeutil.Timestamp) (*Collection, error)
    ListCollectionIDTs(tenantID string, ts typeutil.Timestamp) ([]*Collection, error)
    Get(tenantID string, collectionID typeutil.UniqueID, ts typeutil.Timestamp) (*Collection, error)
    GetCollectionIDByName(tenantID string, collectionName string, ts typeutil.Timestamp) (typeutil.UniqueID, error)
    Insert(in *Collection) error
    Update(in *Collection) error
}

// model <---> db

func UnmarshalCollectionModel(coll *Collection) (*model.Collection, error) {
    var startPositions []*commonpb.KeyDataPair
    if coll.StartPosition != "" {
        err := json.Unmarshal([]byte(coll.StartPosition), &startPositions)
        if err != nil {
            log.Error("unmarshal collection start positions error", zap.Int64("collectionID", coll.CollectionID), zap.Uint64("ts", coll.Ts), zap.Error(err))
            return nil, err
        }
    }

    properties, err := UnmarshalProperties(coll.Properties)
    if err != nil {
        log.Error("unmarshal collection properties error", zap.Int64("collectionID", coll.CollectionID),
            zap.String("properties", coll.Properties), zap.Error(err))
        return nil, err
    }

    return &model.Collection{
        TenantID:         coll.TenantID,
        CollectionID:     coll.CollectionID,
        Name:             coll.CollectionName,
        Description:      coll.Description,
        AutoID:           coll.AutoID,
        ShardsNum:        coll.ShardsNum,
        StartPositions:   startPositions,
        ConsistencyLevel: commonpb.ConsistencyLevel(coll.ConsistencyLevel),
        CreateTime:       coll.Ts,
        Properties:       properties,
    }, nil
}

func UnmarshalProperties(propertiesStr string) ([]*commonpb.KeyValuePair, error) {
    if propertiesStr == "" {
        return nil, nil
    }

    var properties []*commonpb.KeyValuePair
    if propertiesStr != "" {
        if err := json.Unmarshal([]byte(propertiesStr), &properties); err != nil {
            return nil, fmt.Errorf("failed to unmarshal properties: %s", err.Error())
        }
    }

    return properties, nil
}

func MarshalProperties(properties []*commonpb.KeyValuePair) (string, error) {
    if properties == nil {
        return "", nil
    }

    propertiesBytes, err := json.Marshal(properties)
    if err != nil {
        return "", fmt.Errorf("failed to marshal properties: %s", err.Error())
    }
    return string(propertiesBytes), nil
}
@ -1,30 +0,0 @@
package dbmodel

import (
    "time"

    "github.com/milvus-io/milvus/pkg/util/typeutil"
)

type CollectionAlias struct {
    ID              int64              `gorm:"id"`
    TenantID        string             `gorm:"tenant_id"`
    CollectionID    int64              `gorm:"collection_id"`
    CollectionAlias string             `gorm:"collection_alias"`
    Ts              typeutil.Timestamp `gorm:"ts"`
    IsDeleted       bool               `gorm:"is_deleted"`
    CreatedAt       time.Time          `gorm:"created_at"`
    UpdatedAt       time.Time          `gorm:"updated_at"`
}

func (v CollectionAlias) TableName() string {
    return "collection_aliases"
}

//go:generate mockery --name=ICollAliasDb
type ICollAliasDb interface {
    Insert(in []*CollectionAlias) error
    GetCollectionIDByAlias(tenantID string, alias string, ts typeutil.Timestamp) (typeutil.UniqueID, error)
    ListCollectionIDTs(tenantID string, ts typeutil.Timestamp) ([]*CollectionAlias, error)
    List(tenantID string, cidTsPairs []*CollectionAlias) ([]*CollectionAlias, error)
}
@ -1,40 +0,0 @@
package dbmodel

import (
    "time"

    "github.com/milvus-io/milvus/pkg/util/typeutil"
)

type CollectionChannel struct {
    ID                  int64              `gorm:"id"`
    TenantID            string             `gorm:"tenant_id"`
    CollectionID        int64              `gorm:"collection_id"`
    VirtualChannelName  string             `gorm:"virtual_channel_name"`
    PhysicalChannelName string             `gorm:"physical_channel_name"`
    Removed             bool               `gorm:"removed"`
    Ts                  typeutil.Timestamp `gorm:"ts"`
    IsDeleted           bool               `gorm:"is_deleted"`
    CreatedAt           time.Time          `gorm:"created_at"`
    UpdatedAt           time.Time          `gorm:"updated_at"`
}

func (v CollectionChannel) TableName() string {
    return "collection_channels"
}

//go:generate mockery --name=ICollChannelDb
type ICollChannelDb interface {
    GetByCollectionID(tenantID string, collectionID typeutil.UniqueID, ts typeutil.Timestamp) ([]*CollectionChannel, error)
    Insert(in []*CollectionChannel) error
}

func ExtractChannelNames(channels []*CollectionChannel) ([]string, []string) {
    vchans := make([]string, 0, len(channels))
    pchans := make([]string, 0, len(channels))
    for _, ch := range channels {
        vchans = append(vchans, ch.VirtualChannelName)
        pchans = append(pchans, ch.PhysicalChannelName)
    }
    return vchans, pchans
}
@ -1,106 +0,0 @@
package dbmodel

import (
    "testing"
    "time"

    "github.com/stretchr/testify/assert"

    "github.com/milvus-io/milvus-proto/go-api/v2/commonpb"
    "github.com/milvus-io/milvus/pkg/common"
)

var (
    ts = time.Now()
)

func TestUnmarshalCollectionModel(t *testing.T) {
    t.Run("Unmarshal start position fail", func(t *testing.T) {
        collection := &Collection{
            StartPosition: "{\"error json\":}",
        }

        ret, err := UnmarshalCollectionModel(collection)
        assert.Nil(t, ret)
        assert.Error(t, err)
    })

    t.Run("Unmarshal properties fail", func(t *testing.T) {
        collection := &Collection{
            Properties: "{\"error json\":}",
        }

        ret, err := UnmarshalCollectionModel(collection)
        assert.Nil(t, ret)
        assert.Error(t, err)
    })

    t.Run("Unmarshal collection successfully", func(t *testing.T) {
        collection := &Collection{
            TenantID:         "",
            CollectionID:     1,
            CollectionName:   "cn",
            Description:      "",
            AutoID:           false,
            ShardsNum:        common.DefaultShardsNum,
            StartPosition:    "",
            ConsistencyLevel: int32(commonpb.ConsistencyLevel_Eventually),
            Properties:       "",
            Ts:               1,
            IsDeleted:        false,
            CreatedAt:        ts,
            UpdatedAt:        ts,
        }

        ret, err := UnmarshalCollectionModel(collection)
        assert.NotNil(t, ret)
        assert.NoError(t, err)

        assert.Equal(t, "", ret.TenantID)
        assert.Equal(t, int64(1), ret.CollectionID)
        assert.Equal(t, "cn", ret.Name)
        assert.Equal(t, "", ret.Description)
        assert.Equal(t, false, ret.AutoID)
        assert.Equal(t, common.DefaultShardsNum, ret.ShardsNum)
        assert.Equal(t, 0, len(ret.StartPositions))
        assert.Equal(t, commonpb.ConsistencyLevel(3), ret.ConsistencyLevel)
        assert.Nil(t, ret.Properties)
        assert.Equal(t, uint64(1), ret.CreateTime)
    })
}

func TestUnmarshalAndMarshalProperties(t *testing.T) {
    t.Run("Unmarshal and Marshal empty", func(t *testing.T) {
        ret, err := UnmarshalProperties("")
        assert.Nil(t, ret)
        assert.NoError(t, err)

        ret2, err := MarshalProperties(nil)
        assert.Empty(t, ret2)
        assert.NoError(t, err)
    })

    t.Run("Unmarshal and Marshal fail", func(t *testing.T) {
        ret, err := UnmarshalProperties("{\"error json\":}")
        assert.Nil(t, ret)
        assert.Error(t, err)
    })

    t.Run("Unmarshal collection successfully", func(t *testing.T) {
        properties := []*commonpb.KeyValuePair{
            {
                Key:   common.CollectionTTLConfigKey,
                Value: "3600",
            },
        }
        propertiesStr, err := MarshalProperties(properties)
        assert.NotEmpty(t, propertiesStr)
        assert.NoError(t, err)

        ret2, err := UnmarshalProperties(propertiesStr)
        assert.NotNil(t, ret2)
        assert.NoError(t, err)
        assert.Equal(t, ret2, properties)
    })
}
@ -1,30 +0,0 @@
|
|||||||
package dbmodel
|
|
||||||
|
|
||||||
import "context"
|
|
||||||
|
|
||||||
//go:generate mockery --name=IMetaDomain
|
|
||||||
type IMetaDomain interface {
|
|
||||||
CollectionDb(ctx context.Context) ICollectionDb
|
|
||||||
FieldDb(ctx context.Context) IFieldDb
|
|
||||||
CollChannelDb(ctx context.Context) ICollChannelDb
|
|
||||||
CollAliasDb(ctx context.Context) ICollAliasDb
|
|
||||||
PartitionDb(ctx context.Context) IPartitionDb
|
|
||||||
IndexDb(ctx context.Context) IIndexDb
|
|
||||||
SegmentIndexDb(ctx context.Context) ISegmentIndexDb
|
|
||||||
UserDb(ctx context.Context) IUserDb
|
|
||||||
RoleDb(ctx context.Context) IRoleDb
|
|
||||||
UserRoleDb(ctx context.Context) IUserRoleDb
|
|
||||||
GrantDb(ctx context.Context) IGrantDb
|
|
||||||
GrantIDDb(ctx context.Context) IGrantIDDb
|
|
||||||
}
|
|
||||||
|
|
||||||
type ITransaction interface {
|
|
||||||
Transaction(ctx context.Context, fn func(txCtx context.Context) error) error
|
|
||||||
}
|
|
||||||
|
|
||||||
func GetCommonCondition(tenant string, isDelete bool) map[string]interface{} {
|
|
||||||
return map[string]interface{}{
|
|
||||||
"tenant_id": tenant,
|
|
||||||
"is_deleted": isDelete,
|
|
||||||
}
|
|
||||||
}
@ -1,91 +0,0 @@
package dbmodel

import (
    "encoding/json"
    "time"

    "go.uber.org/zap"

    "github.com/milvus-io/milvus-proto/go-api/v2/commonpb"
    "github.com/milvus-io/milvus-proto/go-api/v2/schemapb"
    "github.com/milvus-io/milvus/internal/metastore/model"
    "github.com/milvus-io/milvus/pkg/log"
    "github.com/milvus-io/milvus/pkg/util/funcutil"
    "github.com/milvus-io/milvus/pkg/util/typeutil"
)

type Field struct {
    ID int64 `gorm:"id"`
    TenantID string `gorm:"tenant_id"`
    FieldID int64 `gorm:"field_id"`
    FieldName string `gorm:"field_name"`
    IsPrimaryKey bool `gorm:"is_primary_key"`
    Description string `gorm:"description"`
    DataType schemapb.DataType `gorm:"data_type"`
    TypeParams string `gorm:"type_params"`
    IndexParams string `gorm:"index_params"`
    AutoID bool `gorm:"auto_id"`
    CollectionID int64 `gorm:"collection_id"`
    Ts typeutil.Timestamp `gorm:"ts"`
    IsDeleted bool `gorm:"is_deleted"`
    CreatedAt time.Time `gorm:"created_at"`
    UpdatedAt time.Time `gorm:"updated_at"`
}

func (v Field) TableName() string {
    return "field_schemas"
}

//go:generate mockery --name=IFieldDb
type IFieldDb interface {
    GetByCollectionID(tenantID string, collectionID typeutil.UniqueID, ts typeutil.Timestamp) ([]*Field, error)
    Insert(in []*Field) error
}

// model <---> db

func UnmarshalFieldModel(fields []*Field) ([]*model.Field, error) {
    r := make([]*model.Field, 0, len(fields))
    for _, f := range fields {
        fd, err := ConvertFieldDBToModel(f)
        if err != nil {
            return nil, err
        }
        r = append(r, fd)
    }

    return r, nil
}

func ConvertFieldDBToModel(field *Field) (*model.Field, error) {
    var typeParams []commonpb.KeyValuePair
    if field.TypeParams != "" {
        err := json.Unmarshal([]byte(field.TypeParams), &typeParams)
        if err != nil {
            log.Error("unmarshal TypeParams of field failed", zap.Int64("collectionID", field.CollectionID),
                zap.Int64("fieldID", field.FieldID), zap.String("fieldName", field.FieldName), zap.Error(err))
            return nil, err
        }
    }

    var indexParams []commonpb.KeyValuePair
    if field.IndexParams != "" {
        err := json.Unmarshal([]byte(field.IndexParams), &indexParams)
        if err != nil {
            log.Error("unmarshal IndexParams of field failed", zap.Int64("collectionID", field.CollectionID),
                zap.Int64("fieldID", field.FieldID), zap.String("fieldName", field.FieldName), zap.Error(err))
            return nil, err
        }
    }

    return &model.Field{
        FieldID: field.FieldID,
        Name: field.FieldName,
        IsPrimaryKey: field.IsPrimaryKey,
        Description: field.Description,
        DataType: field.DataType,
        TypeParams: funcutil.ConvertToKeyValuePairPointer(typeParams),
        IndexParams: funcutil.ConvertToKeyValuePairPointer(indexParams),
        AutoID: field.AutoID,
    }, nil
}
@ -1,20 +0,0 @@
package dbmodel

type Grant struct {
    Base
    RoleID int64 `gorm:"role_id"`
    Role Role `gorm:"foreignKey:RoleID"`
    Object string `gorm:"object"`
    ObjectName string `gorm:"object_name"`
}

func (g *Grant) TableName() string {
    return "grant"
}

//go:generate mockery --name=IGrantDb
type IGrantDb interface {
    GetGrants(tenantID string, roleID int64, object string, objectName string) ([]*Grant, error)
    Insert(in *Grant) error
    Delete(tenantID string, roleID int64, object string, objectName string) error
}
@ -1,21 +0,0 @@
package dbmodel

type GrantID struct {
    Base
    GrantID int64 `gorm:"grant_id"`
    Grant Grant `gorm:"foreignKey:GrantID"`
    Privilege string `gorm:"privilege"`
    GrantorID int64 `gorm:"grantor_id"`
    Grantor User `gorm:"foreignKey:GrantorID"`
}

func (g *GrantID) TableName() string {
    return "grant_id"
}

//go:generate mockery --name=IGrantIDDb
type IGrantIDDb interface {
    GetGrantIDs(tenantID string, grantID int64, privilege string, preloadGrant bool, preloadGrantor bool) ([]*GrantID, error)
    Insert(in *GrantID) error
    Delete(tenantID string, grantID int64, privilege string) error
}
@ -1,127 +0,0 @@
package dbmodel

import (
    "encoding/json"
    "time"

    "go.uber.org/zap"

    "github.com/milvus-io/milvus-proto/go-api/v2/commonpb"
    "github.com/milvus-io/milvus/internal/metastore/model"
    "github.com/milvus-io/milvus/pkg/common"
    "github.com/milvus-io/milvus/pkg/log"
    "github.com/milvus-io/milvus/pkg/util/funcutil"
    "github.com/milvus-io/milvus/pkg/util/typeutil"
)

type Index struct {
    ID int64 `gorm:"id"`
    TenantID string `gorm:"tenant_id"`
    FieldID int64 `gorm:"field_id"`
    CollectionID int64 `gorm:"collection_id"`
    IndexID int64 `gorm:"index_id"`
    IndexName string `gorm:"index_name"`
    IndexParams string `gorm:"index_params"`
    TypeParams string `gorm:"type_params"`
    UserIndexParams string `gorm:"user_index_params"`
    IsAutoIndex bool `gorm:"is_auto_index"`
    CreateTime uint64 `gorm:"create_time"`
    IsDeleted bool `gorm:"is_deleted"`
    CreatedAt time.Time `gorm:"created_at"`
    UpdatedAt time.Time `gorm:"updated_at"`
}

func (v Index) TableName() string {
    return "indexes"
}

// ------------- search result -------------

type IndexResult struct {
    FieldID int64
    CollectionID int64
    IndexID int64
    IndexName string
    TypeParams string
    IndexParams string
    CreateTime uint64
    IsDeleted bool
    IsAutoIndex bool
    UserIndexParams string
}

//go:generate mockery --name=IIndexDb
type IIndexDb interface {
    Get(tenantID string, collectionID typeutil.UniqueID) ([]*Index, error)
    List(tenantID string) ([]*IndexResult, error)
    Insert(in []*Index) error
    Update(in *Index) error
    MarkDeletedByCollectionID(tenantID string, collID typeutil.UniqueID) error
    MarkDeletedByIndexID(tenantID string, idxID typeutil.UniqueID) error
}

// model <---> db

func UnmarshalIndexModel(inputs []*IndexResult) ([]*model.Index, error) {
    result := make([]*model.Index, 0, len(inputs))
    for _, ir := range inputs {
        var indexParams []commonpb.KeyValuePair
        if ir.IndexParams != "" {
            err := json.Unmarshal([]byte(ir.IndexParams), &indexParams)
            if err != nil {
                log.Error("unmarshal IndexParams of index failed", zap.Int64("collectionID", ir.CollectionID),
                    zap.Int64("indexID", ir.IndexID), zap.String("indexName", ir.IndexName), zap.Error(err))
                return nil, err
            }
        }
        var userIndexParams []commonpb.KeyValuePair
        if ir.UserIndexParams != "" {
            err := json.Unmarshal([]byte(ir.UserIndexParams), &userIndexParams)
            if err != nil {
                log.Error("unmarshal UserIndexParams of index failed", zap.Int64("collectionID", ir.CollectionID),
                    zap.Int64("indexID", ir.IndexID), zap.String("indexName", ir.IndexName), zap.Error(err))
                return nil, err
            }
        }

        var typeParams []commonpb.KeyValuePair
        if ir.TypeParams != "" {
            err := json.Unmarshal([]byte(ir.TypeParams), &typeParams)
            if err != nil {
                log.Error("unmarshal TypeParams of index failed", zap.Int64("collectionID", ir.CollectionID),
                    zap.Int64("indexID", ir.IndexID), zap.String("indexName", ir.IndexName), zap.Error(err))
                return nil, err
            }
        }

        idx := &model.Index{
            CollectionID: ir.CollectionID,
            FieldID: ir.FieldID,
            IndexID: ir.IndexID,
            IndexName: ir.IndexName,
            IndexParams: funcutil.ConvertToKeyValuePairPointer(indexParams),
            TypeParams: funcutil.ConvertToKeyValuePairPointer(typeParams),
            CreateTime: ir.CreateTime,
            IsDeleted: ir.IsDeleted,
            IsAutoIndex: ir.IsAutoIndex,
            UserIndexParams: funcutil.ConvertToKeyValuePairPointer(userIndexParams),
        }
        result = append(result, idx)
    }

    return result, nil
}

func ConvertIndexDBToModel(indexes []*Index) []common.Int64Tuple {
    r := make([]common.Int64Tuple, 0, len(indexes))

    for _, idx := range indexes {
        tuple := common.Int64Tuple{
            Key: idx.FieldID,
            Value: idx.IndexID,
        }
        r = append(r, tuple)
    }

    return r
}
@ -1,109 +0,0 @@
// Code generated by mockery v2.14.0. DO NOT EDIT.

package mocks

import (
    dbmodel "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    mock "github.com/stretchr/testify/mock"
)

// ICollAliasDb is an autogenerated mock type for the ICollAliasDb type
type ICollAliasDb struct {
    mock.Mock
}

// GetCollectionIDByAlias provides a mock function with given fields: tenantID, alias, ts
func (_m *ICollAliasDb) GetCollectionIDByAlias(tenantID string, alias string, ts uint64) (int64, error) {
    ret := _m.Called(tenantID, alias, ts)

    var r0 int64
    if rf, ok := ret.Get(0).(func(string, string, uint64) int64); ok {
        r0 = rf(tenantID, alias, ts)
    } else {
        r0 = ret.Get(0).(int64)
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string, string, uint64) error); ok {
        r1 = rf(tenantID, alias, ts)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// Insert provides a mock function with given fields: in
func (_m *ICollAliasDb) Insert(in []*dbmodel.CollectionAlias) error {
    ret := _m.Called(in)

    var r0 error
    if rf, ok := ret.Get(0).(func([]*dbmodel.CollectionAlias) error); ok {
        r0 = rf(in)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

// List provides a mock function with given fields: tenantID, cidTsPairs
func (_m *ICollAliasDb) List(tenantID string, cidTsPairs []*dbmodel.CollectionAlias) ([]*dbmodel.CollectionAlias, error) {
    ret := _m.Called(tenantID, cidTsPairs)

    var r0 []*dbmodel.CollectionAlias
    if rf, ok := ret.Get(0).(func(string, []*dbmodel.CollectionAlias) []*dbmodel.CollectionAlias); ok {
        r0 = rf(tenantID, cidTsPairs)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).([]*dbmodel.CollectionAlias)
        }
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string, []*dbmodel.CollectionAlias) error); ok {
        r1 = rf(tenantID, cidTsPairs)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// ListCollectionIDTs provides a mock function with given fields: tenantID, ts
func (_m *ICollAliasDb) ListCollectionIDTs(tenantID string, ts uint64) ([]*dbmodel.CollectionAlias, error) {
    ret := _m.Called(tenantID, ts)

    var r0 []*dbmodel.CollectionAlias
    if rf, ok := ret.Get(0).(func(string, uint64) []*dbmodel.CollectionAlias); ok {
        r0 = rf(tenantID, ts)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).([]*dbmodel.CollectionAlias)
        }
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string, uint64) error); ok {
        r1 = rf(tenantID, ts)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

type mockConstructorTestingTNewICollAliasDb interface {
    mock.TestingT
    Cleanup(func())
}

// NewICollAliasDb creates a new instance of ICollAliasDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
func NewICollAliasDb(t mockConstructorTestingTNewICollAliasDb) *ICollAliasDb {
    mock := &ICollAliasDb{}
    mock.Mock.Test(t)

    t.Cleanup(func() { mock.AssertExpectations(t) })

    return mock
}
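
The mockery-generated mocks removed in this commit all share the same dispatch shape: each method forwards its arguments to mock.Called and then type-asserts the values recorded by On(...).Return(...). As a rough illustration of how such a mock is driven from a test, here is a minimal, hypothetical sketch built directly on testify (the fakeAliasDb type below is illustrative and not part of this commit):

```go
package example

import (
    "testing"

    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/mock"
)

// fakeAliasDb mirrors the generated pattern: embed mock.Mock and route every
// call through Called so the test can script return values with On/Return.
type fakeAliasDb struct {
    mock.Mock
}

func (m *fakeAliasDb) GetCollectionIDByAlias(tenantID, alias string, ts uint64) (int64, error) {
    ret := m.Called(tenantID, alias, ts)
    return ret.Get(0).(int64), ret.Error(1)
}

func TestFakeAliasDb(t *testing.T) {
    m := &fakeAliasDb{}
    m.On("GetCollectionIDByAlias", "", "alias1", uint64(0)).Return(int64(42), nil)

    id, err := m.GetCollectionIDByAlias("", "alias1", uint64(0))
    assert.NoError(t, err)
    assert.Equal(t, int64(42), id)
    m.AssertExpectations(t)
}
```
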
@ -1,65 +0,0 @@
// Code generated by mockery v2.14.0. DO NOT EDIT.

package mocks

import (
    dbmodel "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    mock "github.com/stretchr/testify/mock"
)

// ICollChannelDb is an autogenerated mock type for the ICollChannelDb type
type ICollChannelDb struct {
    mock.Mock
}

// GetByCollectionID provides a mock function with given fields: tenantID, collectionID, ts
func (_m *ICollChannelDb) GetByCollectionID(tenantID string, collectionID int64, ts uint64) ([]*dbmodel.CollectionChannel, error) {
    ret := _m.Called(tenantID, collectionID, ts)

    var r0 []*dbmodel.CollectionChannel
    if rf, ok := ret.Get(0).(func(string, int64, uint64) []*dbmodel.CollectionChannel); ok {
        r0 = rf(tenantID, collectionID, ts)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).([]*dbmodel.CollectionChannel)
        }
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string, int64, uint64) error); ok {
        r1 = rf(tenantID, collectionID, ts)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// Insert provides a mock function with given fields: in
func (_m *ICollChannelDb) Insert(in []*dbmodel.CollectionChannel) error {
    ret := _m.Called(in)

    var r0 error
    if rf, ok := ret.Get(0).(func([]*dbmodel.CollectionChannel) error); ok {
        r0 = rf(in)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

type mockConstructorTestingTNewICollChannelDb interface {
    mock.TestingT
    Cleanup(func())
}

// NewICollChannelDb creates a new instance of ICollChannelDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
func NewICollChannelDb(t mockConstructorTestingTNewICollChannelDb) *ICollChannelDb {
    mock := &ICollChannelDb{}
    mock.Mock.Test(t)

    t.Cleanup(func() { mock.AssertExpectations(t) })

    return mock
}
@ -1,146 +0,0 @@
// Code generated by mockery v2.14.0. DO NOT EDIT.

package mocks

import (
    dbmodel "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    mock "github.com/stretchr/testify/mock"
)

// ICollectionDb is an autogenerated mock type for the ICollectionDb type
type ICollectionDb struct {
    mock.Mock
}

// Get provides a mock function with given fields: tenantID, collectionID, ts
func (_m *ICollectionDb) Get(tenantID string, collectionID int64, ts uint64) (*dbmodel.Collection, error) {
    ret := _m.Called(tenantID, collectionID, ts)

    var r0 *dbmodel.Collection
    if rf, ok := ret.Get(0).(func(string, int64, uint64) *dbmodel.Collection); ok {
        r0 = rf(tenantID, collectionID, ts)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).(*dbmodel.Collection)
        }
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string, int64, uint64) error); ok {
        r1 = rf(tenantID, collectionID, ts)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// GetCollectionIDByName provides a mock function with given fields: tenantID, collectionName, ts
func (_m *ICollectionDb) GetCollectionIDByName(tenantID string, collectionName string, ts uint64) (int64, error) {
    ret := _m.Called(tenantID, collectionName, ts)

    var r0 int64
    if rf, ok := ret.Get(0).(func(string, string, uint64) int64); ok {
        r0 = rf(tenantID, collectionName, ts)
    } else {
        r0 = ret.Get(0).(int64)
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string, string, uint64) error); ok {
        r1 = rf(tenantID, collectionName, ts)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// GetCollectionIDTs provides a mock function with given fields: tenantID, collectionID, ts
func (_m *ICollectionDb) GetCollectionIDTs(tenantID string, collectionID int64, ts uint64) (*dbmodel.Collection, error) {
    ret := _m.Called(tenantID, collectionID, ts)

    var r0 *dbmodel.Collection
    if rf, ok := ret.Get(0).(func(string, int64, uint64) *dbmodel.Collection); ok {
        r0 = rf(tenantID, collectionID, ts)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).(*dbmodel.Collection)
        }
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string, int64, uint64) error); ok {
        r1 = rf(tenantID, collectionID, ts)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// Insert provides a mock function with given fields: in
func (_m *ICollectionDb) Insert(in *dbmodel.Collection) error {
    ret := _m.Called(in)

    var r0 error
    if rf, ok := ret.Get(0).(func(*dbmodel.Collection) error); ok {
        r0 = rf(in)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

// ListCollectionIDTs provides a mock function with given fields: tenantID, ts
func (_m *ICollectionDb) ListCollectionIDTs(tenantID string, ts uint64) ([]*dbmodel.Collection, error) {
    ret := _m.Called(tenantID, ts)

    var r0 []*dbmodel.Collection
    if rf, ok := ret.Get(0).(func(string, uint64) []*dbmodel.Collection); ok {
        r0 = rf(tenantID, ts)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).([]*dbmodel.Collection)
        }
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string, uint64) error); ok {
        r1 = rf(tenantID, ts)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// Update provides a mock function with given fields: in
func (_m *ICollectionDb) Update(in *dbmodel.Collection) error {
    ret := _m.Called(in)

    var r0 error
    if rf, ok := ret.Get(0).(func(*dbmodel.Collection) error); ok {
        r0 = rf(in)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

type mockConstructorTestingTNewICollectionDb interface {
    mock.TestingT
    Cleanup(func())
}

// NewICollectionDb creates a new instance of ICollectionDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
func NewICollectionDb(t mockConstructorTestingTNewICollectionDb) *ICollectionDb {
    mock := &ICollectionDb{}
    mock.Mock.Test(t)

    t.Cleanup(func() { mock.AssertExpectations(t) })

    return mock
}
@ -1,65 +0,0 @@
// Code generated by mockery v2.14.0. DO NOT EDIT.

package mocks

import (
    dbmodel "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    mock "github.com/stretchr/testify/mock"
)

// IFieldDb is an autogenerated mock type for the IFieldDb type
type IFieldDb struct {
    mock.Mock
}

// GetByCollectionID provides a mock function with given fields: tenantID, collectionID, ts
func (_m *IFieldDb) GetByCollectionID(tenantID string, collectionID int64, ts uint64) ([]*dbmodel.Field, error) {
    ret := _m.Called(tenantID, collectionID, ts)

    var r0 []*dbmodel.Field
    if rf, ok := ret.Get(0).(func(string, int64, uint64) []*dbmodel.Field); ok {
        r0 = rf(tenantID, collectionID, ts)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).([]*dbmodel.Field)
        }
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string, int64, uint64) error); ok {
        r1 = rf(tenantID, collectionID, ts)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// Insert provides a mock function with given fields: in
func (_m *IFieldDb) Insert(in []*dbmodel.Field) error {
    ret := _m.Called(in)

    var r0 error
    if rf, ok := ret.Get(0).(func([]*dbmodel.Field) error); ok {
        r0 = rf(in)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

type mockConstructorTestingTNewIFieldDb interface {
    mock.TestingT
    Cleanup(func())
}

// NewIFieldDb creates a new instance of IFieldDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
func NewIFieldDb(t mockConstructorTestingTNewIFieldDb) *IFieldDb {
    mock := &IFieldDb{}
    mock.Mock.Test(t)

    t.Cleanup(func() { mock.AssertExpectations(t) })

    return mock
}
@ -1,79 +0,0 @@
// Code generated by mockery v2.14.0. DO NOT EDIT.

package mocks

import (
    dbmodel "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    mock "github.com/stretchr/testify/mock"
)

// IGrantDb is an autogenerated mock type for the IGrantDb type
type IGrantDb struct {
    mock.Mock
}

// Delete provides a mock function with given fields: tenantID, roleID, object, objectName
func (_m *IGrantDb) Delete(tenantID string, roleID int64, object string, objectName string) error {
    ret := _m.Called(tenantID, roleID, object, objectName)

    var r0 error
    if rf, ok := ret.Get(0).(func(string, int64, string, string) error); ok {
        r0 = rf(tenantID, roleID, object, objectName)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

// GetGrants provides a mock function with given fields: tenantID, roleID, object, objectName
func (_m *IGrantDb) GetGrants(tenantID string, roleID int64, object string, objectName string) ([]*dbmodel.Grant, error) {
    ret := _m.Called(tenantID, roleID, object, objectName)

    var r0 []*dbmodel.Grant
    if rf, ok := ret.Get(0).(func(string, int64, string, string) []*dbmodel.Grant); ok {
        r0 = rf(tenantID, roleID, object, objectName)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).([]*dbmodel.Grant)
        }
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string, int64, string, string) error); ok {
        r1 = rf(tenantID, roleID, object, objectName)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// Insert provides a mock function with given fields: in
func (_m *IGrantDb) Insert(in *dbmodel.Grant) error {
    ret := _m.Called(in)

    var r0 error
    if rf, ok := ret.Get(0).(func(*dbmodel.Grant) error); ok {
        r0 = rf(in)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

type mockConstructorTestingTNewIGrantDb interface {
    mock.TestingT
    Cleanup(func())
}

// NewIGrantDb creates a new instance of IGrantDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
func NewIGrantDb(t mockConstructorTestingTNewIGrantDb) *IGrantDb {
    mock := &IGrantDb{}
    mock.Mock.Test(t)

    t.Cleanup(func() { mock.AssertExpectations(t) })

    return mock
}
@ -1,79 +0,0 @@
// Code generated by mockery v2.14.0. DO NOT EDIT.

package mocks

import (
    dbmodel "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    mock "github.com/stretchr/testify/mock"
)

// IGrantIDDb is an autogenerated mock type for the IGrantIDDb type
type IGrantIDDb struct {
    mock.Mock
}

// Delete provides a mock function with given fields: tenantID, grantID, privilege
func (_m *IGrantIDDb) Delete(tenantID string, grantID int64, privilege string) error {
    ret := _m.Called(tenantID, grantID, privilege)

    var r0 error
    if rf, ok := ret.Get(0).(func(string, int64, string) error); ok {
        r0 = rf(tenantID, grantID, privilege)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

// GetGrantIDs provides a mock function with given fields: tenantID, grantID, privilege, preloadGrant, preloadGrantor
func (_m *IGrantIDDb) GetGrantIDs(tenantID string, grantID int64, privilege string, preloadGrant bool, preloadGrantor bool) ([]*dbmodel.GrantID, error) {
    ret := _m.Called(tenantID, grantID, privilege, preloadGrant, preloadGrantor)

    var r0 []*dbmodel.GrantID
    if rf, ok := ret.Get(0).(func(string, int64, string, bool, bool) []*dbmodel.GrantID); ok {
        r0 = rf(tenantID, grantID, privilege, preloadGrant, preloadGrantor)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).([]*dbmodel.GrantID)
        }
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string, int64, string, bool, bool) error); ok {
        r1 = rf(tenantID, grantID, privilege, preloadGrant, preloadGrantor)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// Insert provides a mock function with given fields: in
func (_m *IGrantIDDb) Insert(in *dbmodel.GrantID) error {
    ret := _m.Called(in)

    var r0 error
    if rf, ok := ret.Get(0).(func(*dbmodel.GrantID) error); ok {
        r0 = rf(in)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

type mockConstructorTestingTNewIGrantIDDb interface {
    mock.TestingT
    Cleanup(func())
}

// NewIGrantIDDb creates a new instance of IGrantIDDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
func NewIGrantIDDb(t mockConstructorTestingTNewIGrantIDDb) *IGrantIDDb {
    mock := &IGrantIDDb{}
    mock.Mock.Test(t)

    t.Cleanup(func() { mock.AssertExpectations(t) })

    return mock
}
@ -1,130 +0,0 @@
// Code generated by mockery v2.14.0. DO NOT EDIT.

package mocks

import (
    dbmodel "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    mock "github.com/stretchr/testify/mock"
)

// IIndexDb is an autogenerated mock type for the IIndexDb type
type IIndexDb struct {
    mock.Mock
}

// Get provides a mock function with given fields: tenantID, collectionID
func (_m *IIndexDb) Get(tenantID string, collectionID int64) ([]*dbmodel.Index, error) {
    ret := _m.Called(tenantID, collectionID)

    var r0 []*dbmodel.Index
    if rf, ok := ret.Get(0).(func(string, int64) []*dbmodel.Index); ok {
        r0 = rf(tenantID, collectionID)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).([]*dbmodel.Index)
        }
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string, int64) error); ok {
        r1 = rf(tenantID, collectionID)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// Insert provides a mock function with given fields: in
func (_m *IIndexDb) Insert(in []*dbmodel.Index) error {
    ret := _m.Called(in)

    var r0 error
    if rf, ok := ret.Get(0).(func([]*dbmodel.Index) error); ok {
        r0 = rf(in)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

// List provides a mock function with given fields: tenantID
func (_m *IIndexDb) List(tenantID string) ([]*dbmodel.IndexResult, error) {
    ret := _m.Called(tenantID)

    var r0 []*dbmodel.IndexResult
    if rf, ok := ret.Get(0).(func(string) []*dbmodel.IndexResult); ok {
        r0 = rf(tenantID)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).([]*dbmodel.IndexResult)
        }
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string) error); ok {
        r1 = rf(tenantID)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// MarkDeletedByCollectionID provides a mock function with given fields: tenantID, collID
func (_m *IIndexDb) MarkDeletedByCollectionID(tenantID string, collID int64) error {
    ret := _m.Called(tenantID, collID)

    var r0 error
    if rf, ok := ret.Get(0).(func(string, int64) error); ok {
        r0 = rf(tenantID, collID)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

// MarkDeletedByIndexID provides a mock function with given fields: tenantID, idxID
func (_m *IIndexDb) MarkDeletedByIndexID(tenantID string, idxID int64) error {
    ret := _m.Called(tenantID, idxID)

    var r0 error
    if rf, ok := ret.Get(0).(func(string, int64) error); ok {
        r0 = rf(tenantID, idxID)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

// Update provides a mock function with given fields: in
func (_m *IIndexDb) Update(in *dbmodel.Index) error {
    ret := _m.Called(in)

    var r0 error
    if rf, ok := ret.Get(0).(func(*dbmodel.Index) error); ok {
        r0 = rf(in)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

type mockConstructorTestingTNewIIndexDb interface {
    mock.TestingT
    Cleanup(func())
}

// NewIIndexDb creates a new instance of IIndexDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
func NewIIndexDb(t mockConstructorTestingTNewIIndexDb) *IIndexDb {
    mock := &IIndexDb{}
    mock.Mock.Test(t)

    t.Cleanup(func() { mock.AssertExpectations(t) })

    return mock
}
@ -1,222 +0,0 @@
// Code generated by mockery v2.14.0. DO NOT EDIT.

package mocks

import (
    context "context"

    dbmodel "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    mock "github.com/stretchr/testify/mock"
)

// IMetaDomain is an autogenerated mock type for the IMetaDomain type
type IMetaDomain struct {
    mock.Mock
}

// CollAliasDb provides a mock function with given fields: ctx
func (_m *IMetaDomain) CollAliasDb(ctx context.Context) dbmodel.ICollAliasDb {
    ret := _m.Called(ctx)

    var r0 dbmodel.ICollAliasDb
    if rf, ok := ret.Get(0).(func(context.Context) dbmodel.ICollAliasDb); ok {
        r0 = rf(ctx)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).(dbmodel.ICollAliasDb)
        }
    }

    return r0
}

// CollChannelDb provides a mock function with given fields: ctx
func (_m *IMetaDomain) CollChannelDb(ctx context.Context) dbmodel.ICollChannelDb {
    ret := _m.Called(ctx)

    var r0 dbmodel.ICollChannelDb
    if rf, ok := ret.Get(0).(func(context.Context) dbmodel.ICollChannelDb); ok {
        r0 = rf(ctx)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).(dbmodel.ICollChannelDb)
        }
    }

    return r0
}

// CollectionDb provides a mock function with given fields: ctx
func (_m *IMetaDomain) CollectionDb(ctx context.Context) dbmodel.ICollectionDb {
    ret := _m.Called(ctx)

    var r0 dbmodel.ICollectionDb
    if rf, ok := ret.Get(0).(func(context.Context) dbmodel.ICollectionDb); ok {
        r0 = rf(ctx)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).(dbmodel.ICollectionDb)
        }
    }

    return r0
}

// FieldDb provides a mock function with given fields: ctx
func (_m *IMetaDomain) FieldDb(ctx context.Context) dbmodel.IFieldDb {
    ret := _m.Called(ctx)

    var r0 dbmodel.IFieldDb
    if rf, ok := ret.Get(0).(func(context.Context) dbmodel.IFieldDb); ok {
        r0 = rf(ctx)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).(dbmodel.IFieldDb)
        }
    }

    return r0
}

// GrantDb provides a mock function with given fields: ctx
func (_m *IMetaDomain) GrantDb(ctx context.Context) dbmodel.IGrantDb {
    ret := _m.Called(ctx)

    var r0 dbmodel.IGrantDb
    if rf, ok := ret.Get(0).(func(context.Context) dbmodel.IGrantDb); ok {
        r0 = rf(ctx)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).(dbmodel.IGrantDb)
        }
    }

    return r0
}

// GrantIDDb provides a mock function with given fields: ctx
func (_m *IMetaDomain) GrantIDDb(ctx context.Context) dbmodel.IGrantIDDb {
    ret := _m.Called(ctx)

    var r0 dbmodel.IGrantIDDb
    if rf, ok := ret.Get(0).(func(context.Context) dbmodel.IGrantIDDb); ok {
        r0 = rf(ctx)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).(dbmodel.IGrantIDDb)
        }
    }

    return r0
}

// IndexDb provides a mock function with given fields: ctx
func (_m *IMetaDomain) IndexDb(ctx context.Context) dbmodel.IIndexDb {
    ret := _m.Called(ctx)

    var r0 dbmodel.IIndexDb
    if rf, ok := ret.Get(0).(func(context.Context) dbmodel.IIndexDb); ok {
        r0 = rf(ctx)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).(dbmodel.IIndexDb)
        }
    }

    return r0
}

// PartitionDb provides a mock function with given fields: ctx
func (_m *IMetaDomain) PartitionDb(ctx context.Context) dbmodel.IPartitionDb {
    ret := _m.Called(ctx)

    var r0 dbmodel.IPartitionDb
    if rf, ok := ret.Get(0).(func(context.Context) dbmodel.IPartitionDb); ok {
        r0 = rf(ctx)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).(dbmodel.IPartitionDb)
        }
    }

    return r0
}

// RoleDb provides a mock function with given fields: ctx
func (_m *IMetaDomain) RoleDb(ctx context.Context) dbmodel.IRoleDb {
    ret := _m.Called(ctx)

    var r0 dbmodel.IRoleDb
    if rf, ok := ret.Get(0).(func(context.Context) dbmodel.IRoleDb); ok {
        r0 = rf(ctx)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).(dbmodel.IRoleDb)
        }
    }

    return r0
}

// SegmentIndexDb provides a mock function with given fields: ctx
func (_m *IMetaDomain) SegmentIndexDb(ctx context.Context) dbmodel.ISegmentIndexDb {
    ret := _m.Called(ctx)

    var r0 dbmodel.ISegmentIndexDb
    if rf, ok := ret.Get(0).(func(context.Context) dbmodel.ISegmentIndexDb); ok {
        r0 = rf(ctx)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).(dbmodel.ISegmentIndexDb)
        }
    }

    return r0
}

// UserDb provides a mock function with given fields: ctx
func (_m *IMetaDomain) UserDb(ctx context.Context) dbmodel.IUserDb {
    ret := _m.Called(ctx)

    var r0 dbmodel.IUserDb
    if rf, ok := ret.Get(0).(func(context.Context) dbmodel.IUserDb); ok {
        r0 = rf(ctx)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).(dbmodel.IUserDb)
        }
    }

    return r0
}

// UserRoleDb provides a mock function with given fields: ctx
func (_m *IMetaDomain) UserRoleDb(ctx context.Context) dbmodel.IUserRoleDb {
    ret := _m.Called(ctx)

    var r0 dbmodel.IUserRoleDb
    if rf, ok := ret.Get(0).(func(context.Context) dbmodel.IUserRoleDb); ok {
        r0 = rf(ctx)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).(dbmodel.IUserRoleDb)
        }
    }

    return r0
}

type mockConstructorTestingTNewIMetaDomain interface {
    mock.TestingT
    Cleanup(func())
}

// NewIMetaDomain creates a new instance of IMetaDomain. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
func NewIMetaDomain(t mockConstructorTestingTNewIMetaDomain) *IMetaDomain {
    mock := &IMetaDomain{}
    mock.Mock.Test(t)

    t.Cleanup(func() { mock.AssertExpectations(t) })

    return mock
}
@ -1,79 +0,0 @@
// Code generated by mockery v2.14.0. DO NOT EDIT.

package mocks

import (
    dbmodel "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    mock "github.com/stretchr/testify/mock"
)

// IPartitionDb is an autogenerated mock type for the IPartitionDb type
type IPartitionDb struct {
    mock.Mock
}

// GetByCollectionID provides a mock function with given fields: tenantID, collectionID, ts
func (_m *IPartitionDb) GetByCollectionID(tenantID string, collectionID int64, ts uint64) ([]*dbmodel.Partition, error) {
    ret := _m.Called(tenantID, collectionID, ts)

    var r0 []*dbmodel.Partition
    if rf, ok := ret.Get(0).(func(string, int64, uint64) []*dbmodel.Partition); ok {
        r0 = rf(tenantID, collectionID, ts)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).([]*dbmodel.Partition)
        }
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string, int64, uint64) error); ok {
        r1 = rf(tenantID, collectionID, ts)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// Insert provides a mock function with given fields: in
func (_m *IPartitionDb) Insert(in []*dbmodel.Partition) error {
    ret := _m.Called(in)

    var r0 error
    if rf, ok := ret.Get(0).(func([]*dbmodel.Partition) error); ok {
        r0 = rf(in)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

// Update provides a mock function with given fields: in
func (_m *IPartitionDb) Update(in *dbmodel.Partition) error {
    ret := _m.Called(in)

    var r0 error
    if rf, ok := ret.Get(0).(func(*dbmodel.Partition) error); ok {
        r0 = rf(in)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

type mockConstructorTestingTNewIPartitionDb interface {
    mock.TestingT
    Cleanup(func())
}

// NewIPartitionDb creates a new instance of IPartitionDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
func NewIPartitionDb(t mockConstructorTestingTNewIPartitionDb) *IPartitionDb {
    mock := &IPartitionDb{}
    mock.Mock.Test(t)

    t.Cleanup(func() { mock.AssertExpectations(t) })

    return mock
}
@ -1,79 +0,0 @@
// Code generated by mockery v2.14.0. DO NOT EDIT.

package mocks

import (
    dbmodel "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    mock "github.com/stretchr/testify/mock"
)

// IRoleDb is an autogenerated mock type for the IRoleDb type
type IRoleDb struct {
    mock.Mock
}

// Delete provides a mock function with given fields: tenantID, name
func (_m *IRoleDb) Delete(tenantID string, name string) error {
    ret := _m.Called(tenantID, name)

    var r0 error
    if rf, ok := ret.Get(0).(func(string, string) error); ok {
        r0 = rf(tenantID, name)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

// GetRoles provides a mock function with given fields: tenantID, name
func (_m *IRoleDb) GetRoles(tenantID string, name string) ([]*dbmodel.Role, error) {
    ret := _m.Called(tenantID, name)

    var r0 []*dbmodel.Role
    if rf, ok := ret.Get(0).(func(string, string) []*dbmodel.Role); ok {
        r0 = rf(tenantID, name)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).([]*dbmodel.Role)
        }
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string, string) error); ok {
        r1 = rf(tenantID, name)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// Insert provides a mock function with given fields: in
func (_m *IRoleDb) Insert(in *dbmodel.Role) error {
    ret := _m.Called(in)

    var r0 error
    if rf, ok := ret.Get(0).(func(*dbmodel.Role) error); ok {
        r0 = rf(in)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

type mockConstructorTestingTNewIRoleDb interface {
    mock.TestingT
    Cleanup(func())
}

// NewIRoleDb creates a new instance of IRoleDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
func NewIRoleDb(t mockConstructorTestingTNewIRoleDb) *IRoleDb {
    mock := &IRoleDb{}
    mock.Mock.Test(t)

    t.Cleanup(func() { mock.AssertExpectations(t) })

    return mock
}
@ -1,144 +0,0 @@
// Code generated by mockery v2.14.0. DO NOT EDIT.

package mocks

import (
    dbmodel "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    mock "github.com/stretchr/testify/mock"
)

// ISegmentIndexDb is an autogenerated mock type for the ISegmentIndexDb type
type ISegmentIndexDb struct {
    mock.Mock
}

// Get provides a mock function with given fields: tenantID, collectionID, buildID
func (_m *ISegmentIndexDb) Get(tenantID string, collectionID int64, buildID int64) ([]*dbmodel.SegmentIndexResult, error) {
    ret := _m.Called(tenantID, collectionID, buildID)

    var r0 []*dbmodel.SegmentIndexResult
    if rf, ok := ret.Get(0).(func(string, int64, int64) []*dbmodel.SegmentIndexResult); ok {
        r0 = rf(tenantID, collectionID, buildID)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).([]*dbmodel.SegmentIndexResult)
        }
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string, int64, int64) error); ok {
        r1 = rf(tenantID, collectionID, buildID)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// Insert provides a mock function with given fields: in
func (_m *ISegmentIndexDb) Insert(in []*dbmodel.SegmentIndex) error {
    ret := _m.Called(in)

    var r0 error
    if rf, ok := ret.Get(0).(func([]*dbmodel.SegmentIndex) error); ok {
        r0 = rf(in)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

// List provides a mock function with given fields: tenantID
func (_m *ISegmentIndexDb) List(tenantID string) ([]*dbmodel.SegmentIndexResult, error) {
    ret := _m.Called(tenantID)

    var r0 []*dbmodel.SegmentIndexResult
    if rf, ok := ret.Get(0).(func(string) []*dbmodel.SegmentIndexResult); ok {
        r0 = rf(tenantID)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).([]*dbmodel.SegmentIndexResult)
        }
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string) error); ok {
        r1 = rf(tenantID)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// MarkDeleted provides a mock function with given fields: tenantID, in
func (_m *ISegmentIndexDb) MarkDeleted(tenantID string, in []*dbmodel.SegmentIndex) error {
    ret := _m.Called(tenantID, in)

    var r0 error
    if rf, ok := ret.Get(0).(func(string, []*dbmodel.SegmentIndex) error); ok {
        r0 = rf(tenantID, in)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

// MarkDeletedByBuildID provides a mock function with given fields: tenantID, idxID
func (_m *ISegmentIndexDb) MarkDeletedByBuildID(tenantID string, idxID int64) error {
    ret := _m.Called(tenantID, idxID)

    var r0 error
    if rf, ok := ret.Get(0).(func(string, int64) error); ok {
        r0 = rf(tenantID, idxID)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

// MarkDeletedByCollectionID provides a mock function with given fields: tenantID, collID
func (_m *ISegmentIndexDb) MarkDeletedByCollectionID(tenantID string, collID int64) error {
    ret := _m.Called(tenantID, collID)

    var r0 error
    if rf, ok := ret.Get(0).(func(string, int64) error); ok {
        r0 = rf(tenantID, collID)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

// Update provides a mock function with given fields: in
func (_m *ISegmentIndexDb) Update(in *dbmodel.SegmentIndex) error {
    ret := _m.Called(in)

    var r0 error
    if rf, ok := ret.Get(0).(func(*dbmodel.SegmentIndex) error); ok {
        r0 = rf(in)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

type mockConstructorTestingTNewISegmentIndexDb interface {
    mock.TestingT
    Cleanup(func())
}

// NewISegmentIndexDb creates a new instance of ISegmentIndexDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
func NewISegmentIndexDb(t mockConstructorTestingTNewISegmentIndexDb) *ISegmentIndexDb {
    mock := &ISegmentIndexDb{}
    mock.Mock.Test(t)

    t.Cleanup(func() { mock.AssertExpectations(t) })

    return mock
}
@ -1,116 +0,0 @@
// Code generated by mockery v2.14.0. DO NOT EDIT.

package mocks

import (
    dbmodel "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
    mock "github.com/stretchr/testify/mock"
)

// IUserDb is an autogenerated mock type for the IUserDb type
type IUserDb struct {
    mock.Mock
}

// GetByUsername provides a mock function with given fields: tenantID, username
func (_m *IUserDb) GetByUsername(tenantID string, username string) (*dbmodel.User, error) {
    ret := _m.Called(tenantID, username)

    var r0 *dbmodel.User
    if rf, ok := ret.Get(0).(func(string, string) *dbmodel.User); ok {
        r0 = rf(tenantID, username)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).(*dbmodel.User)
        }
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string, string) error); ok {
        r1 = rf(tenantID, username)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// Insert provides a mock function with given fields: in
func (_m *IUserDb) Insert(in *dbmodel.User) error {
    ret := _m.Called(in)

    var r0 error
    if rf, ok := ret.Get(0).(func(*dbmodel.User) error); ok {
        r0 = rf(in)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

// ListUser provides a mock function with given fields: tenantID
func (_m *IUserDb) ListUser(tenantID string) ([]*dbmodel.User, error) {
    ret := _m.Called(tenantID)

    var r0 []*dbmodel.User
    if rf, ok := ret.Get(0).(func(string) []*dbmodel.User); ok {
        r0 = rf(tenantID)
    } else {
        if ret.Get(0) != nil {
            r0 = ret.Get(0).([]*dbmodel.User)
        }
    }

    var r1 error
    if rf, ok := ret.Get(1).(func(string) error); ok {
        r1 = rf(tenantID)
    } else {
        r1 = ret.Error(1)
    }

    return r0, r1
}

// MarkDeletedByUsername provides a mock function with given fields: tenantID, username
func (_m *IUserDb) MarkDeletedByUsername(tenantID string, username string) error {
    ret := _m.Called(tenantID, username)

    var r0 error
    if rf, ok := ret.Get(0).(func(string, string) error); ok {
        r0 = rf(tenantID, username)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

// UpdatePassword provides a mock function with given fields: tenantID, username, encryptedPassword
func (_m *IUserDb) UpdatePassword(tenantID string, username string, encryptedPassword string) error {
    ret := _m.Called(tenantID, username, encryptedPassword)

    var r0 error
    if rf, ok := ret.Get(0).(func(string, string, string) error); ok {
        r0 = rf(tenantID, username, encryptedPassword)
    } else {
        r0 = ret.Error(0)
    }

    return r0
}

type mockConstructorTestingTNewIUserDb interface {
    mock.TestingT
    Cleanup(func())
}

// NewIUserDb creates a new instance of IUserDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
func NewIUserDb(t mockConstructorTestingTNewIUserDb) *IUserDb {
    mock := &IUserDb{}
    mock.Mock.Test(t)

    t.Cleanup(func() { mock.AssertExpectations(t) })

    return mock
}
|
|
||||||
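Reviewer note: mockery-generated constructors such as NewIUserDb above are typically consumed in tests as sketched below. This is an illustrative sketch only; the tenant/user values, test name, and import paths are assumptions inferred from the package layout shown in this diff, not part of the commit.

package mocks_test

import (
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
	"github.com/milvus-io/milvus/internal/metastore/db/dbmodel/mocks"
)

func TestIUserDbMockSketch(t *testing.T) {
	// NewIUserDb wires t into the mock and registers AssertExpectations via t.Cleanup.
	userDb := mocks.NewIUserDb(t)

	// Program an expectation; "tenant-1" and "alice" are illustrative values only.
	userDb.On("GetByUsername", "tenant-1", "alice").
		Return(&dbmodel.User{Username: "alice"}, nil)

	u, err := userDb.GetByUsername("tenant-1", "alice")
	assert.NoError(t, err)
	assert.Equal(t, "alice", u.Username)
}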
@ -1,79 +0,0 @@
// Code generated by mockery v2.14.0. DO NOT EDIT.

package mocks

import (
	dbmodel "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
	mock "github.com/stretchr/testify/mock"
)

// IUserRoleDb is an autogenerated mock type for the IUserRoleDb type
type IUserRoleDb struct {
	mock.Mock
}

// Delete provides a mock function with given fields: tenantID, userID, roleID
func (_m *IUserRoleDb) Delete(tenantID string, userID int64, roleID int64) error {
	ret := _m.Called(tenantID, userID, roleID)

	var r0 error
	if rf, ok := ret.Get(0).(func(string, int64, int64) error); ok {
		r0 = rf(tenantID, userID, roleID)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}

// GetUserRoles provides a mock function with given fields: tenantID, userID, roleID
func (_m *IUserRoleDb) GetUserRoles(tenantID string, userID int64, roleID int64) ([]*dbmodel.UserRole, error) {
	ret := _m.Called(tenantID, userID, roleID)

	var r0 []*dbmodel.UserRole
	if rf, ok := ret.Get(0).(func(string, int64, int64) []*dbmodel.UserRole); ok {
		r0 = rf(tenantID, userID, roleID)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]*dbmodel.UserRole)
		}
	}

	var r1 error
	if rf, ok := ret.Get(1).(func(string, int64, int64) error); ok {
		r1 = rf(tenantID, userID, roleID)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}

// Insert provides a mock function with given fields: in
func (_m *IUserRoleDb) Insert(in *dbmodel.UserRole) error {
	ret := _m.Called(in)

	var r0 error
	if rf, ok := ret.Get(0).(func(*dbmodel.UserRole) error); ok {
		r0 = rf(in)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}

type mockConstructorTestingTNewIUserRoleDb interface {
	mock.TestingT
	Cleanup(func())
}

// NewIUserRoleDb creates a new instance of IUserRoleDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
func NewIUserRoleDb(t mockConstructorTestingTNewIUserRoleDb) *IUserRoleDb {
	mock := &IUserRoleDb{}
	mock.Mock.Test(t)

	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}
@ -1,54 +0,0 @@
package dbmodel

import (
	"time"

	"github.com/milvus-io/milvus/internal/metastore/model"
	"github.com/milvus-io/milvus/pkg/util/typeutil"
)

type Partition struct {
	ID                        int64              `gorm:"id"`
	TenantID                  string             `gorm:"tenant_id"`
	PartitionID               int64              `gorm:"partition_id"`
	PartitionName             string             `gorm:"partition_name"`
	PartitionCreatedTimestamp uint64             `gorm:"partition_created_timestamp"`
	CollectionID              int64              `gorm:"collection_id"`
	Status                    int32              `gorm:"status"`
	Ts                        typeutil.Timestamp `gorm:"ts"`
	IsDeleted                 bool               `gorm:"is_deleted"`
	CreatedAt                 time.Time          `gorm:"created_at"`
	UpdatedAt                 time.Time          `gorm:"updated_at"`
}

func (v Partition) TableName() string {
	return "partitions"
}

//go:generate mockery --name=IPartitionDb
type IPartitionDb interface {
	GetByCollectionID(tenantID string, collectionID typeutil.UniqueID, ts typeutil.Timestamp) ([]*Partition, error)
	Insert(in []*Partition) error
	Update(in *Partition) error
	//MarkDeleted(tenantID string, collID typeutil.UniqueID) error
}

// model <---> db

func UnmarshalPartitionModel(partitons []*Partition) []*model.Partition {
	r := make([]*model.Partition, 0, len(partitons))
	for _, p := range partitons {
		partition := ConvertPartitionDBToModel(p)
		r = append(r, partition)
	}

	return r
}

func ConvertPartitionDBToModel(partiton *Partition) *model.Partition {
	return &model.Partition{
		PartitionID:               partiton.PartitionID,
		PartitionName:             partiton.PartitionName,
		PartitionCreatedTimestamp: partiton.PartitionCreatedTimestamp,
	}
}
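For context, the conversion helpers removed above are plain field mappers. A minimal sketch of how ConvertPartitionDBToModel was meant to be used follows; the IDs and timestamp are made-up values and the snippet is illustrative only, not code from the repository.

package main

import (
	"fmt"

	"github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
)

func main() {
	// Hypothetical row as it would have been read back from the "partitions" table.
	p := &dbmodel.Partition{
		PartitionID:               100,
		PartitionName:             "_default",
		PartitionCreatedTimestamp: 1690000000,
		CollectionID:              1,
	}

	// The removed helper copies only the three partition fields into the metastore model.
	m := dbmodel.ConvertPartitionDBToModel(p)
	fmt.Println(m.PartitionID, m.PartitionName, m.PartitionCreatedTimestamp)
}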
@ -1,23 +0,0 @@
package dbmodel

import "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb"

type Role struct {
	Base
	Name string `gorm:"name"`
}

func (r *Role) TableName() string {
	return "role"
}

func (r *Role) Unmarshal() *milvuspb.RoleEntity {
	return &milvuspb.RoleEntity{Name: r.Name}
}

//go:generate mockery --name=IRoleDb
type IRoleDb interface {
	GetRoles(tenantID string, name string) ([]*Role, error)
	Insert(in *Role) error
	Delete(tenantID string, name string) error
}
@ -1,99 +0,0 @@
package dbmodel

import (
	"time"

	"github.com/milvus-io/milvus/pkg/util/typeutil"
)

type SegmentIndex struct {
	ID       int64  `gorm:"id"`
	TenantID string `gorm:"tenant_id"`
	// SegmentIndexInfo (CollectionID & PartitionID & SegmentID & FieldID & IndexID & BuildID & EnableIndex)
	CollectionID int64 `gorm:"collection_id"`
	PartitionID  int64 `gorm:"partition_id"`
	SegmentID    int64 `gorm:"segment_id"`
	NumRows      int64 `gorm:"num_rows"`
	// IndexInfo (IndexID & IndexName & IndexParams)
	IndexID       int64     `gorm:"index_id"`
	BuildID       int64     `gorm:"build_id"`
	NodeID        int64     `gorm:"node_id"`
	IndexVersion  int64     `gorm:"index_version"`
	IndexState    int32     `gorm:"index_state"`
	FailReason    string    `gorm:"fail_reason"`
	CreateTime    uint64    `gorm:"create_time"`
	IndexFileKeys string    `gorm:"index_file_keys"`
	IndexSize     uint64    `gorm:"index_size"`
	IsDeleted     bool      `gorm:"is_deleted"`
	CreatedAt     time.Time `gorm:"created_at"`
	UpdatedAt     time.Time `gorm:"updated_at"`
}

func (v SegmentIndex) TableName() string {
	return "segment_indexes"
}

type SegmentIndexResult struct {
	CollectionID  int64
	PartitionID   int64
	SegmentID     int64
	NumRows       int64
	IndexID       int64
	BuildID       int64
	NodeID        int64
	IndexVersion  int64
	IndexState    int32
	FailReason    string
	IsDeleted     bool
	CreateTime    uint64
	IndexFileKeys string
	IndexSize     uint64
}

//go:generate mockery --name=ISegmentIndexDb
type ISegmentIndexDb interface {
	Get(tenantID string, collectionID, buildID typeutil.UniqueID) ([]*SegmentIndexResult, error)
	List(tenantID string) ([]*SegmentIndexResult, error)
	Insert(in []*SegmentIndex) error
	Update(in *SegmentIndex) error
	MarkDeleted(tenantID string, in []*SegmentIndex) error
	MarkDeletedByCollectionID(tenantID string, collID typeutil.UniqueID) error
	MarkDeletedByBuildID(tenantID string, idxID typeutil.UniqueID) error
}

//func UnmarshalSegmentIndexModel(inputs []*SegmentIndexResult) ([]*model.SegmentIndex, error) {
//	result := make([]*model.SegmentIndex, 0, len(inputs))
//	for _, ir := range inputs {
//
//		var IndexFileKeys []string
//		if ir.IndexFileKeys != "" {
//			err := json.Unmarshal([]byte(ir.IndexFileKeys), &IndexFileKeys)
//			if err != nil {
//				log.Error("unmarshal index file paths of segment index failed", zap.Int64("collectionID", ir.CollectionID),
//				zap.Int64("indexID", ir.IndexID), zap.Int64("segmentID", ir.SegmentID),
//				zap.Int64("buildID", ir.BuildID), zap.Error(err))
//				return nil, err
//			}
//		}
//
//		idx := &model.SegmentIndex{
//			SegmentID:     ir.SegmentID,
//			CollectionID:  ir.CollectionID,
//			PartitionID:   ir.PartitionID,
//			NumRows:       ir.NumRows,
//			IndexID:       ir.IndexID,
//			BuildID:       ir.BuildID,
//			NodeID:        ir.NodeID,
//			IndexVersion:  ir.IndexVersion,
//			IndexState:    commonpb.IndexState(ir.IndexState),
//			FailReason:    ir.FailReason,
//			IsDeleted:     ir.IsDeleted,
//			CreateTime:    ir.CreateTime,
//			IndexFileKeys: IndexFileKeys,
//			IndexSize:     ir.IndexSize,
//		}
//		result = append(result, idx)
//	}
//
//	return result, nil
//}
@ -1,44 +0,0 @@
package dbmodel

import (
	"time"

	"github.com/milvus-io/milvus/internal/metastore/model"
)

type User struct {
	ID                int64     `gorm:"id"`
	TenantID          string    `gorm:"tenant_id"`
	Username          string    `gorm:"username"`
	EncryptedPassword string    `gorm:"encrypted_password"`
	IsSuper           bool      `gorm:"is_super"`
	IsDeleted         bool      `gorm:"is_deleted"`
	CreatedAt         time.Time `gorm:"created_at"`
	UpdatedAt         time.Time `gorm:"updated_at"`
}

func (v User) TableName() string {
	return "credential_users"
}

//go:generate mockery --name=IUserDb
type IUserDb interface {
	GetByUsername(tenantID string, username string) (*User, error)
	ListUser(tenantID string) ([]*User, error)
	Insert(in *User) error
	MarkDeletedByUsername(tenantID string, username string) error
	UpdatePassword(tenantID string, username string, encryptedPassword string) error
}

// model <---> db

func UnmarshalUserModel(user *User) *model.Credential {
	if user == nil {
		return nil
	}

	return &model.Credential{
		Username:          user.Username,
		EncryptedPassword: user.EncryptedPassword,
	}
}
@ -1,21 +0,0 @@
package dbmodel

type UserRole struct {
	Base
	UserID int64 `gorm:"user_id"`
	RoleID int64 `gorm:"role_id"`

	User User `gorm:"foreignKey:UserID"`
	Role Role `gorm:"foreignKey:RoleID"`
}

func (u *UserRole) TableName() string {
	return "user_role"
}

//go:generate mockery --name=IUserRoleDb
type IUserRoleDb interface {
	GetUserRoles(tenantID string, userID int64, roleID int64) ([]*UserRole, error)
	Insert(in *UserRole) error
	Delete(tenantID string, userID int64, roleID int64) error
}
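Side note: the foreignKey tags on UserRole.User and UserRole.Role exist so GORM can resolve the joined rows. The sketch below shows how such a lookup is typically written; listUserRoles is a hypothetical helper, and the column names are assumed from the user_role schema further down in this diff, not taken from the repository's DAO code.

package dbmodel_sketch

import (
	"gorm.io/gorm"

	"github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
)

// listUserRoles resolves the User and Role associations declared via the
// foreignKey tags on dbmodel.UserRole, filtering out soft-deleted rows.
func listUserRoles(db *gorm.DB, tenantID string) ([]*dbmodel.UserRole, error) {
	var rows []*dbmodel.UserRole
	err := db.Model(&dbmodel.UserRole{}).
		Preload("User").
		Preload("Role").
		Where("tenant_id = ? AND is_deleted = false", tenantID).
		Find(&rows).Error
	return rows, err
}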
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -39,9 +39,6 @@ import (
 	"github.com/milvus-io/milvus/internal/kv"
 	etcdkv "github.com/milvus-io/milvus/internal/kv/etcd"
 	"github.com/milvus-io/milvus/internal/metastore"
-	"github.com/milvus-io/milvus/internal/metastore/db/dao"
-	"github.com/milvus-io/milvus/internal/metastore/db/dbcore"
-	"github.com/milvus-io/milvus/internal/metastore/db/rootcoord"
 	kvmetestore "github.com/milvus-io/milvus/internal/metastore/kv/rootcoord"
 	"github.com/milvus-io/milvus/internal/metastore/model"
 	pb "github.com/milvus-io/milvus/internal/proto/etcdpb"
@ -344,14 +341,6 @@ func (c *Core) initMetaTable() error {
 		}

 		catalog = &kvmetestore.Catalog{Txn: metaKV, Snapshot: ss}
-	case util.MetaStoreTypeMysql:
-		// connect to database
-		err := dbcore.Connect(&Params.DBCfg)
-		if err != nil {
-			return err
-		}
-
-		catalog = rootcoord.NewTableCatalog(dbcore.NewTxImpl(), dao.NewMetaDomain())
 	default:
 		return retry.Unrecoverable(fmt.Errorf("not supported meta store: %s", Params.MetaStoreCfg.MetaStoreType.GetValue()))
 	}
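In effect, after the hunk above the only value initMetaTable accepts for metastore.type is etcd. The following self-contained sketch illustrates the resulting behaviour; the identifiers are stand-ins, not the actual Milvus symbols.

package main

import (
	"errors"
	"fmt"
)

// metaStoreTypeEtcd stands in for util.MetaStoreTypeEtcd; it is the only value left.
const metaStoreTypeEtcd = "etcd"

// validateMetaStoreType mirrors the simplified switch in initMetaTable:
// anything other than "etcd" is now rejected as an unsupported meta store.
func validateMetaStoreType(t string) error {
	if t == metaStoreTypeEtcd {
		return nil
	}
	return errors.New("not supported meta store: " + t)
}

func main() {
	fmt.Println(validateMetaStoreType("etcd"))  // <nil>
	fmt.Println(validateMetaStoreType("mysql")) // not supported meta store: mysql
}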
@ -211,7 +211,6 @@ github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre
 github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8=
 github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
 github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
-github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
 github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
 github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
 github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo=
@ -26,7 +26,6 @@ import (
 // Meta Prefix consts
 const (
 	MetaStoreTypeEtcd = "etcd"
-	MetaStoreTypeMysql = "mysql"

 	SegmentMetaPrefix = "queryCoord-segmentMeta"
 	ChangeInfoMetaPrefix = "queryCoord-sealedSegmentChangeInfo"
@ -55,7 +55,7 @@ const (

 // Const of Global Config List
 func globalConfigPrefixs() []string {
-	return []string{"metastore", "localStorage", "etcd", "mysql", "minio", "pulsar", "kafka", "rocksmq", "log", "grpc", "common", "quotaAndLimits"}
+	return []string{"metastore", "localStorage", "etcd", "minio", "pulsar", "kafka", "rocksmq", "log", "grpc", "common", "quotaAndLimits"}
 }

 var defaultYaml = []string{"milvus.yaml", "default.yaml", "user.yaml"}
@ -47,7 +47,6 @@ type ServiceParam struct {
 	LocalStorageCfg LocalStorageConfig
 	MetaStoreCfg MetaStoreConfig
 	EtcdCfg EtcdConfig
-	DBCfg MetaDBConfig
 	MQCfg MQConfig
 	PulsarCfg PulsarConfig
 	KafkaCfg KafkaConfig
@ -62,7 +61,6 @@ func (p *ServiceParam) init() {
 	p.LocalStorageCfg.Init(&p.BaseTable)
 	p.MetaStoreCfg.Init(&p.BaseTable)
 	p.EtcdCfg.Init(&p.BaseTable)
-	p.DBCfg.Init(&p.BaseTable)
 	p.MQCfg.Init(&p.BaseTable)
 	p.PulsarCfg.Init(&p.BaseTable)
 	p.KafkaCfg.Init(&p.BaseTable)
@ -287,83 +285,12 @@ func (p *MetaStoreConfig) Init(base *BaseTable) {
 		Key: "metastore.type",
 		Version: "2.2.0",
 		DefaultValue: util.MetaStoreTypeEtcd,
-		Doc: `Default value: etcd
-Valid values: [etcd, mysql]`,
+		Doc: `Default value: etcd, Valid values: etcd `,
 		Export: true,
 	}
 	p.MetaStoreType.Init(base.mgr)
 }

-// /////////////////////////////////////////////////////////////////////////////
-// --- meta db ---
-type MetaDBConfig struct {
-	Username ParamItem `refreshable:"false"`
-	Password ParamItem `refreshable:"false"`
-	Address ParamItem `refreshable:"false"`
-	Port ParamItem `refreshable:"false"`
-	DBName ParamItem `refreshable:"false"`
-	MaxOpenConns ParamItem `refreshable:"false"`
-	MaxIdleConns ParamItem `refreshable:"false"`
-}
-
-func (p *MetaDBConfig) Init(base *BaseTable) {
-	p.Username = ParamItem{
-		Key: "mysql.username",
-		Version: "2.2.0",
-		PanicIfEmpty: true,
-		Export: true,
-	}
-	p.Username.Init(base.mgr)
-
-	p.Password = ParamItem{
-		Key: "mysql.password",
-		Version: "2.2.0",
-		PanicIfEmpty: true,
-		Export: true,
-	}
-	p.Password.Init(base.mgr)
-
-	p.Address = ParamItem{
-		Key: "mysql.address",
-		Version: "2.2.0",
-		PanicIfEmpty: true,
-		Export: true,
-	}
-	p.Address.Init(base.mgr)
-
-	p.Port = ParamItem{
-		Key: "mysql.port",
-		Version: "2.2.0",
-		DefaultValue: "3306",
-		Export: true,
-	}
-	p.Port.Init(base.mgr)
-
-	p.DBName = ParamItem{
-		Key: "mysql.dbName",
-		Version: "2.2.0",
-		PanicIfEmpty: true,
-		Export: true,
-	}
-	p.DBName.Init(base.mgr)
-
-	p.MaxOpenConns = ParamItem{
-		Key: "mysql.maxOpenConns",
-		Version: "2.2.0",
-		DefaultValue: "20",
-		Export: true,
-	}
-	p.MaxOpenConns.Init(base.mgr)
-
-	p.MaxIdleConns = ParamItem{
-		Key: "mysql.maxIdleConns",
-		Version: "2.2.0",
-		DefaultValue: "5",
-		Export: true,
-	}
-	p.MaxIdleConns.Init(base.mgr)
-}
-
 // /////////////////////////////////////////////////////////////////////////////
 // --- mq ---

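With MetaDBConfig gone, metastore.type is the only metastore setting that ServiceParam still initializes. The stand-alone sketch below imitates how a ParamItem-style default behaves; the paramItem type here is a simplified stand-in, not the real paramtable.ParamItem.

package main

import "fmt"

// paramItem is a simplified stand-in for paramtable.ParamItem, used only to
// illustrate how metastore.type falls back to its default when unset.
type paramItem struct {
	Key          string
	DefaultValue string
	overrides    map[string]string // stands in for the config manager
}

func (p paramItem) GetValue() string {
	if v, ok := p.overrides[p.Key]; ok {
		return v
	}
	return p.DefaultValue
}

func main() {
	metaStoreType := paramItem{
		Key:          "metastore.type",
		DefaultValue: "etcd", // util.MetaStoreTypeEtcd in the real code
		overrides:    map[string]string{},
	}
	fmt.Println(metaStoreType.GetValue()) // "etcd" — the only supported value after this change
}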
@ -151,25 +151,7 @@ def update_values(file_path, deploy_mode, hostname, server_tag, milvus_config, s
     # if "replicas" in milvus_config["readonly"]:
     #     values_dict["readonly"]["replicas"] = milvus_config["readonly"]["replicas"]

-    # use_external_mysql = False
-    # if "external_mysql" in milvus_config and milvus_config["external_mysql"]:
-    #     use_external_mysql = True
-    # # meta mysql
-    # if use_external_mysql:
-    #     values_dict["mysql"]["enabled"] = False
-    #     # values_dict["mysql"]["persistence"]["enabled"] = True
-    #     # values_dict["mysql"]["persistence"]["existingClaim"] = hashlib.md5(path_value.encode(encoding='UTF-8')).hexdigest()
-    #     values_dict['externalMysql']['enabled'] = True
-    #     if deploy_mode == "local":
-    #         values_dict['externalMysql']["ip"] = "192.168.1.238"
-    #     else:
-    #         values_dict['externalMysql']["ip"] = "milvus-mysql.test"
-    #     values_dict['externalMysql']["port"] = 3306
-    #     values_dict['externalMysql']["user"] = "root"
-    #     values_dict['externalMysql']["password"] = "milvus"
-    #     values_dict['externalMysql']["database"] = "db"
-    # else:
-    #     values_dict["mysql"]["enabled"] = False
     # # update values.yaml with the given host
     node_config = None
     perf_tolerations = [{
@ -351,7 +333,7 @@ def restart_server(helm_release_name, namespace):
     # body = {"replicas": 0}
     pods = v1.list_namespaced_pod(namespace)
     for i in pods.items:
-        if i.metadata.name.find(helm_release_name) != -1 and i.metadata.name.find("mysql") == -1:
+        if i.metadata.name.find(helm_release_name) != -1:
             pod_name = i.metadata.name
             break
     # v1.patch_namespaced_config_map(config_map_name, namespace, body, pretty='true')
@ -374,7 +356,7 @@ def restart_server(helm_release_name, namespace):
             logger.error(pod_name_tmp)
             if pod_name_tmp == pod_name:
                 continue
-            elif pod_name_tmp.find(helm_release_name) == -1 or pod_name_tmp.find("mysql") != -1:
+            elif pod_name_tmp.find(helm_release_name) == -1:
                 continue
             else:
                 status_res = v1.read_namespaced_pod_status(pod_name_tmp, namespace, pretty='true')
@ -18,7 +18,7 @@ ann_accuracy:
 cluster: true
 readonly:
   replicas: 2
-external_mysql: true
+
 source_file: /test/milvus/ann_hdf5/sift-128-euclidean.hdf5
 collection_name: sift_128_euclidean
 index_types: ['flat']
@ -47,7 +47,7 @@ ann_accuracy:
 cluster: true
 readonly:
   replicas: 2
-external_mysql: true
+
 source_file: /test/milvus/ann_hdf5/sift-128-euclidean.hdf5
 collection_name: sift_128_euclidean
@ -77,7 +77,7 @@ ann_accuracy:
 cluster: true
 readonly:
   replicas: 2
-external_mysql: true
+
 source_file: /test/milvus/ann_hdf5/sift-128-euclidean.hdf5
 collection_name: sift_128_euclidean
@ -108,7 +108,7 @@ ann_accuracy:
 cluster: true
 readonly:
   replicas: 2
-external_mysql: true
+
 source_file: /test/milvus/ann_hdf5/sift-128-euclidean.hdf5
 collection_name: sift_128_euclidean
@ -138,7 +138,7 @@ ann_accuracy:
 cluster: true
 readonly:
   replicas: 2
-external_mysql: true
+
 source_file: /test/milvus/ann_hdf5/sift-128-euclidean.hdf5
 collection_name: sift_128_euclidean
@ -169,7 +169,7 @@ ann_accuracy:
 cluster: true
 readonly:
   replicas: 2
-external_mysql: true
+
 source_file: /test/milvus/ann_hdf5/glove-200-angular.hdf5
 collection_name: glove_200_angular
@ -199,7 +199,7 @@ ann_accuracy:
 cluster: true
 readonly:
   replicas: 2
-external_mysql: true
+
 source_file: /test/milvus/ann_hdf5/glove-200-angular.hdf5
 collection_name: glove_200_angular
@ -229,7 +229,7 @@ ann_accuracy:
 cluster: true
 readonly:
   replicas: 2
-external_mysql: true
+
 source_file: /test/milvus/ann_hdf5/glove-200-angular.hdf5
 collection_name: glove_200_angular
@ -259,7 +259,7 @@ ann_accuracy:
 cluster: true
 readonly:
   replicas: 2
-external_mysql: true
+
 source_file: /test/milvus/ann_hdf5/glove-200-angular.hdf5
 collection_name: glove_200_angular
@ -289,7 +289,7 @@ ann_accuracy:
 cluster: true
 readonly:
   replicas: 2
-external_mysql: true
+
 source_file: /test/milvus/ann_hdf5/glove-200-angular.hdf5
 collection_name: glove_200_angular
@ -320,7 +320,7 @@ ann_accuracy:
 cluster: true
 readonly:
   replicas: 2
-external_mysql: true
+
 source_file: /test/milvus/ann_hdf5/glove-200-angular.hdf5
 collection_name: glove_200_angular
@ -15,7 +15,7 @@ ann_accuracy:
 cluster: true
 readonly:
   replicas: 2
-external_mysql: true
+
 source_file: /test/milvus/ann_hdf5/sift-128-euclidean.hdf5
 collection_name: sift_128_euclidean
 index_types: ['ivf_flat', 'ivf_sq8']
@ -38,7 +38,6 @@ ann_accuracy:
 # gpu_resource_config.build_index_resources:
 #   - gpu0
 # cluster: false
-# external_mysql: false
 # source_file: /test/milvus/ann_hdf5/sift-128-euclidean.hdf5
 # collection_name: sift_128_euclidean
 # index_types: ['ivf_flat', 'ivf_sq8']
@ -39,7 +39,7 @@ search_performance:
 cluster: true
 readonly:
   replicas: 2
-external_mysql: true
+
 collection_name: sift_10m_128_l2_011
 run_count: 2
 top_ks: [1, 10, 100, 1000]
@ -66,7 +66,7 @@ search_performance:
 #   - gpu1
 # wal_enable: true
 # cluster: true
-# external_mysql: true
+#
 # collection_name: sift_50m_128_l2_011
 # run_count: 2
 # top_ks: [1, 10, 100, 1000]
@ -15,7 +15,7 @@ insert_performance:
   - gpu0
   - gpu1
 wal_enable: true
-external_mysql: true
+
 cluster: true
 collection_name: sift_1b_128_l2_sq8_011
 ni_per: 50000
@ -14,7 +14,7 @@ locust_mix_performance:
 gpu_resource_config.build_index_resources:
   gpu0
 wal_enable: true
-external_mysql: true
+
 cluster: true
 readonly:
   replicas: 2
@ -15,7 +15,7 @@ search_performance:
   - gpu0
   - gpu1
 wal_enable: true
-external_mysql: true
+
 cluster: true
 readonly:
   replicas: 3
@ -18,7 +18,7 @@ locust_search_performance:
   - gpu1
 wal_enable: true
 cluster: true
-external_mysql: true
+
 readonly:
   replicas: 1
 collection_name: sift_1m_128_l2_2
@ -1,45 +0,0 @@
import time
import logging
import pytest
from pymilvus import IndexType

from utils.util_pymilvus import get_milvus, gen_vectors, default_dim
from common.common_type import CaseLabel


class TestMysql:
    """
    ******************************************************************
      The following cases are used to test mysql failure
    ******************************************************************
    """

    @pytest.fixture(scope="function", autouse=True)
    def skip_check(self, connect, args):
        if args["service_name"].find("shards") != -1:
            reason = "Skip restart cases in shards mode"
            logging.getLogger().info(reason)
            pytest.skip(reason)

    @pytest.mark.tags(CaseLabel.L2)
    def _test_kill_mysql_during_index(self, connect, collection, args):
        big_nb = 20000
        index_param = {"nlist": 1024, "m": 16}
        index_type = IndexType.IVF_PQ
        vectors = gen_vectors(big_nb, default_dim)
        status, ids = connect.bulk_insert(collection, vectors, ids=[i for i in range(big_nb)])
        status = connect.flush([collection])
        assert status.OK()
        status, res_count = connect.count_entities(collection)
        logging.getLogger().info(res_count)
        assert status.OK()
        assert res_count == big_nb
        logging.getLogger().info("Start create index async")
        status = connect.create_index(collection, index_type, index_param, _async=True)
        time.sleep(2)
        logging.getLogger().info("Start play mysql failure")
        # pass
        new_connect = get_milvus(args["ip"], args["port"], handler=args["handler"])
        status, res_count = new_connect.count_entities(collection)
        assert status.OK()
        assert res_count == big_nb
@ -448,7 +448,6 @@ class TestAliasOperationInvalid(TestcaseBase):
         #                      check_items=error)

     @pytest.mark.tags(CaseLabel.L1)
-    @pytest.mark.skip("not supported on mysql")
     def test_alias_drop_not_exist_alias(self):
         """
         target: test collection dropping alias which is not exist
@ -863,7 +863,7 @@ def restart_server(helm_release_name):
     # body = {"replicas": 0}
     pods = v1.list_namespaced_pod(namespace)
     for i in pods.items:
-        if i.metadata.name.find(helm_release_name) != -1 and i.metadata.name.find("mysql") == -1:
+        if i.metadata.name.find(helm_release_name) != -1:
             pod_name = i.metadata.name
             break
     # v1.patch_namespaced_config_map(config_map_name, namespace, body, pretty='true')
@ -886,7 +886,7 @@ def restart_server(helm_release_name):
             log.error(pod_name_tmp)
             if pod_name_tmp == pod_name:
                 continue
-            elif pod_name_tmp.find(helm_release_name) == -1 or pod_name_tmp.find("mysql") != -1:
+            elif pod_name_tmp.find(helm_release_name) == -1:
                 continue
             else:
                 status_res = v1.read_namespaced_pod_status(pod_name_tmp, namespace, pretty='true')
@ -1,274 +0,0 @@
mysql:
  enabled: true
  initdbScripts:
    meta.sql: |+
      -- create database
      CREATE DATABASE if not exists milvus_meta CHARACTER SET utf8mb4;

      /*
       create tables script

       Notices:
        1. id, tenant_id, is_deleted, created_at, updated_at are 5 common columns for all collections.
        2. Query index in community version CANNOT includes tenant_id, since tenant_id is not existed and will miss query index.
       */

      -- collections
      CREATE TABLE if not exists milvus_meta.collections (
        id BIGINT NOT NULL AUTO_INCREMENT,
        tenant_id VARCHAR(128) DEFAULT NULL,
        collection_id BIGINT NOT NULL,
        collection_name VARCHAR(256),
        description VARCHAR(2048) DEFAULT NULL,
        auto_id BOOL DEFAULT FALSE,
        shards_num INT,
        start_position TEXT,
        consistency_level INT,
        status INT NOT NULL,
        properties VARCHAR(512),
        ts BIGINT UNSIGNED DEFAULT 0,
        is_deleted BOOL DEFAULT FALSE,
        created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP on update current_timestamp,
        PRIMARY KEY (id),
        UNIQUE KEY uk_tenant_id_collection_id_ts (tenant_id, collection_id, ts)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

      -- collection aliases
      CREATE TABLE if not exists milvus_meta.collection_aliases (
        id BIGINT NOT NULL AUTO_INCREMENT,
        tenant_id VARCHAR(128) DEFAULT NULL,
        collection_id BIGINT NOT NULL,
        collection_alias VARCHAR(128),
        ts BIGINT UNSIGNED DEFAULT 0,
        is_deleted BOOL DEFAULT FALSE,
        created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP on update current_timestamp,
        PRIMARY KEY (id),
        UNIQUE KEY uk_tenant_id_collection_alias_ts (tenant_id, collection_alias, ts),
        INDEX idx_tenant_id_collection_id_ts (tenant_id, collection_id, ts)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

      -- channels
      CREATE TABLE if not exists milvus_meta.collection_channels (
        id BIGINT NOT NULL AUTO_INCREMENT,
        tenant_id VARCHAR(128) DEFAULT NULL,
        collection_id BIGINT NOT NULL,
        virtual_channel_name VARCHAR(256) NOT NULL,
        physical_channel_name VARCHAR(256) NOT NULL,
        removed BOOL DEFAULT FALSE,
        ts BIGINT UNSIGNED DEFAULT 0,
        is_deleted BOOL DEFAULT FALSE,
        created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP on update current_timestamp,
        PRIMARY KEY (id),
        UNIQUE KEY uk_tenant_id_collection_id_virtual_channel_name_ts (tenant_id, collection_id, virtual_channel_name, ts),
        INDEX idx_tenant_id_collection_id_ts (tenant_id, collection_id, ts)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

      -- fields
      CREATE TABLE if not exists milvus_meta.field_schemas (
        id BIGINT NOT NULL AUTO_INCREMENT,
        tenant_id VARCHAR(128) DEFAULT NULL,
        field_id BIGINT NOT NULL,
        field_name VARCHAR(256) NOT NULL,
        is_primary_key BOOL NOT NULL,
        description VARCHAR(2048) DEFAULT NULL,
        data_type INT UNSIGNED NOT NULL,
        type_params VARCHAR(2048),
        index_params VARCHAR(2048),
        auto_id BOOL NOT NULL,
        collection_id BIGINT NOT NULL,
        ts BIGINT UNSIGNED DEFAULT 0,
        is_deleted BOOL DEFAULT FALSE,
        created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP on update current_timestamp,
        PRIMARY KEY (id),
        UNIQUE KEY uk_tenant_id_collection_id_field_name_ts (tenant_id, collection_id, field_name, ts),
        INDEX idx_tenant_id_collection_id_field_id_ts (tenant_id, collection_id, field_id, ts)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

      -- partitions
      CREATE TABLE if not exists milvus_meta.`partitions` (
        id BIGINT NOT NULL AUTO_INCREMENT,
        tenant_id VARCHAR(128) DEFAULT NULL,
        partition_id BIGINT NOT NULL,
        partition_name VARCHAR(256),
        partition_created_timestamp bigint unsigned,
        collection_id BIGINT NOT NULL,
        status INT NOT NULL,
        ts BIGINT UNSIGNED DEFAULT 0,
        is_deleted BOOL DEFAULT FALSE,
        created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP on update current_timestamp,
        PRIMARY KEY (id),
        UNIQUE KEY uk_tenant_id_collection_id_partition_name_ts (tenant_id, collection_id, partition_name, ts),
        INDEX idx_tenant_id_collection_id_partition_id_ts (tenant_id, collection_id, partition_id, ts)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

      -- indexes
      CREATE TABLE if not exists milvus_meta.`indexes` (
        id BIGINT NOT NULL AUTO_INCREMENT,
        tenant_id VARCHAR(128) DEFAULT NULL,
        field_id BIGINT NOT NULL,
        collection_id BIGINT NOT NULL,
        index_id BIGINT NOT NULL,
        index_name VARCHAR(256),
        index_params VARCHAR(2048),
        user_index_params VARCHAR(2048),
        is_auto_index BOOL DEFAULT FALSE,
        create_time bigint unsigned,
        is_deleted BOOL DEFAULT FALSE,
        created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP on update current_timestamp,
        PRIMARY KEY (id),
        INDEX idx_tenant_id_collection_id_index_id (tenant_id, collection_id, index_id)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

      -- index file paths
      CREATE TABLE if not exists milvus_meta.index_file_paths (
        id BIGINT NOT NULL AUTO_INCREMENT,
        tenant_id VARCHAR(128) DEFAULT NULL,
        index_build_id BIGINT NOT NULL,
        index_file_path VARCHAR(256),
        is_deleted BOOL DEFAULT FALSE,
        created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP on update current_timestamp,
        PRIMARY KEY (id),
        INDEX idx_tenant_id_index_build_id (tenant_id, index_build_id)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

      -- segments
      CREATE TABLE if not exists milvus_meta.segments (
        id BIGINT NOT NULL AUTO_INCREMENT,
        tenant_id VARCHAR(128) DEFAULT NULL,
        segment_id BIGINT NOT NULL,
        collection_id BIGINT NOT NULL,
        partition_id BIGINT NOT NULL,
        num_rows BIGINT NOT NULL,
        max_row_num INT COMMENT 'estimate max rows',
        dm_channel VARCHAR(128) NOT NULL,
        dml_position TEXT COMMENT 'checkpoint',
        start_position TEXT,
        compaction_from VARCHAR(4096) COMMENT 'old segment IDs',
        created_by_compaction BOOL,
        segment_state TINYINT UNSIGNED NOT NULL,
        last_expire_time bigint unsigned COMMENT 'segment assignment expiration time',
        dropped_at bigint unsigned,
        is_deleted BOOL DEFAULT FALSE,
        created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP on update current_timestamp,
        PRIMARY KEY (id),
        INDEX idx_tenant_id_collection_id_segment_id (tenant_id, collection_id, segment_id)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

      -- segment indexes
      CREATE TABLE if not exists milvus_meta.segment_indexes (
        id BIGINT NOT NULL AUTO_INCREMENT,
        tenant_id VARCHAR(128) DEFAULT NULL,
        collection_id BIGINT NOT NULL,
        partition_id BIGINT NOT NULL,
        segment_id BIGINT NOT NULL,
        field_id BIGINT NOT NULL,
        index_id BIGINT NOT NULL,
        index_build_id BIGINT,
        enable_index BOOL NOT NULL,
        create_time bigint unsigned,
        index_file_keys VARCHAR(4096),
        index_size BIGINT UNSIGNED,
        `version` INT UNSIGNED,
        is_deleted BOOL DEFAULT FALSE COMMENT 'as mark_deleted',
        recycled BOOL DEFAULT FALSE COMMENT 'binlog files truly deleted',
        created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP on update current_timestamp,
        PRIMARY KEY (id),
        UNIQUE KEY uk_tenant_id_segment_id_index_id (tenant_id, segment_id, index_id),
        INDEX idx_tenant_id_collection_id_segment_id_index_id (tenant_id, collection_id, segment_id, index_id)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

      -- binlog files info
      CREATE TABLE if not exists milvus_meta.binlogs (
        id BIGINT NOT NULL AUTO_INCREMENT,
        tenant_id VARCHAR(128) DEFAULT NULL,
        field_id BIGINT NOT NULL,
        segment_id BIGINT NOT NULL,
        collection_id BIGINT NOT NULL,
        log_type SMALLINT UNSIGNED NOT NULL COMMENT 'binlog、stats binlog、delta binlog',
        num_entries BIGINT,
        timestamp_from BIGINT UNSIGNED,
        timestamp_to BIGINT UNSIGNED,
        log_path VARCHAR(256) NOT NULL,
        log_size BIGINT NOT NULL,
        is_deleted BOOL DEFAULT FALSE,
        created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP on update current_timestamp,
        PRIMARY KEY (id),
        INDEX idx_tenant_id_segment_id_log_type (tenant_id, segment_id, log_type)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

      -- users
      CREATE TABLE if not exists milvus_meta.credential_users (
        id BIGINT NOT NULL AUTO_INCREMENT,
        tenant_id VARCHAR(128) DEFAULT NULL,
        username VARCHAR(128) NOT NULL,
        encrypted_password VARCHAR(256) NOT NULL,
        is_super BOOL NOT NULL DEFAULT false,
        is_deleted BOOL NOT NULL DEFAULT false,
        created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP on update current_timestamp,
        PRIMARY KEY (id),
        INDEX idx_tenant_id_username (tenant_id, username)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

      -- role
      CREATE TABLE if not exists milvus_meta.role (
        id BIGINT NOT NULL AUTO_INCREMENT,
        tenant_id VARCHAR(128) DEFAULT NULL,
        name VARCHAR(128) NOT NULL,
        is_deleted BOOL NOT NULL DEFAULT false,
        created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP on update current_timestamp,
        INDEX idx_role_tenant_name (tenant_id, name, is_deleted),
        PRIMARY KEY (id)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

      -- user-role
      CREATE TABLE if not exists milvus_meta.user_role (
        id BIGINT NOT NULL AUTO_INCREMENT,
        tenant_id VARCHAR(128) DEFAULT NULL,
        user_id BIGINT NOT NULL,
        role_id BIGINT NOT NULL,
        is_deleted BOOL NOT NULL DEFAULT false,
        created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP on update current_timestamp,
        INDEX idx_role_mapping_tenant_user_role (tenant_id, user_id, role_id, is_deleted),
        PRIMARY KEY (id)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

      -- grant
      CREATE TABLE if not exists milvus_meta.grant (
        id BIGINT NOT NULL AUTO_INCREMENT,
        tenant_id VARCHAR(128) DEFAULT NULL,
        role_id BIGINT NOT NULL,
        object VARCHAR(128) NOT NULL,
        object_name VARCHAR(128) NOT NULL,
        is_deleted BOOL NOT NULL DEFAULT false,
        created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP on update current_timestamp,
        INDEX idx_grant_principal_resource_tenant (tenant_id, role_id, object, object_name, is_deleted),
        PRIMARY KEY (id)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

      -- grant-id
      CREATE TABLE if not exists milvus_meta.grant_id (
        id BIGINT NOT NULL AUTO_INCREMENT,
        tenant_id VARCHAR(128) DEFAULT NULL,
        grant_id BIGINT NOT NULL,
        grantor_id BIGINT NOT NULL,
        privilege VARCHAR(128) NOT NULL,
        is_deleted BOOL NOT NULL DEFAULT false,
        created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP on update current_timestamp,
        INDEX idx_grant_id_tenant_grantor (tenant_id, grant_id, grantor_id, is_deleted),
        PRIMARY KEY (id)
      ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;