Add logic of autoindex (#19570)
Signed-off-by: zhenshan.cao <zhenshan.cao@zilliz.com>
parent 76515126b7
commit 0ac4bc32a5
Makefile (4 changed lines)
@ -168,6 +168,10 @@ test-indexnode:
	@echo "Running go unittests..."
	@(env bash $(PWD)/scripts/run_go_unittest.sh -t indexnode)

test-indexcoord:
	@echo "Running go unittests..."
	@(env bash $(PWD)/scripts/run_go_unittest.sh -t indexcoord)

test-proxy:
	@echo "Running go unittests..."
	@(env bash $(PWD)/scripts/run_go_unittest.sh -t proxy)

@ -41,7 +41,7 @@ etcd:
    enabled: false # Whether to support ETCD secure connection mode
    tlsCert: /path/to/etcd-client.pem # path to your cert file
    tlsKey: /path/to/etcd-client-key.pem # path to your key file
    tlsCACert: /path/to/ca.pem # path to your CACert file
    # TLS min version
    # Optional values: 1.0, 1.1, 1.2, 1.3
    # We recommend using version 1.2 and above
@ -114,7 +114,7 @@ rocksmq:
  retentionTimeInMinutes: 7200 # 5 days, 5 * 24 * 60 minutes, The retention time of the message in rocksmq.
  retentionSizeInMB: 8192 # 8 GB, 8 * 1024 MB, The retention size of the message in rocksmq.
  compactionInterval: 86400 # 1 day, trigger rocksdb compaction every day to remove deleted data
  lrucacheratio: 0.06 # rocksdb cache memory ratio

# Related configuration of rootCoord, used to handle data definition language (DDL) and data control language (DCL) requests
rootCoord:
@ -297,8 +297,8 @@ grpc:
  clientMaxSendSize: 104857600 # 100 MB, 100 * 1024 * 1024

  client:
    dialTimeout: 5000
    keepAliveTime: 10000
    keepAliveTimeout: 20000
    maxMaxAttempts: 5
    initialBackOff: 1.0
@ -341,7 +341,7 @@ common:
  defaultPartitionName: "_default" # default partition name for a collection
  defaultIndexName: "_default_idx" # default index name
  retentionDuration: 86400 # time travel reserved time, insert/delete will not be cleaned in this period. 1 days in seconds
  entityExpiration: -1 # Entity expiration in seconds, CAUTION make sure entityExpiration >= retentionDuration and -1 means never expire

  gracefulTime: 5000 # milliseconds. it represents the interval (in ms) by which the request arrival time needs to be subtracted in the case of Bounded Consistency.

@ -458,3 +458,7 @@ quotaAndLimits:

  # coolOffSpeed is the speed of search&query rates cool off.
  coolOffSpeed: 0.9 # (0, 1]
+
+# AutoIndexConfig
+autoIndex:
+  enable: false
go.mod (2 changed lines)
@ -63,6 +63,8 @@ require (

require github.com/apache/thrift v0.15.0

+require github.com/sandertv/go-formula/v2 v2.0.0-alpha.7 // indirect
+
require (
	github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect
	github.com/AthenZ/athenz v1.10.15 // indirect
go.sum (2 changed lines)
@ -614,6 +614,8 @@ github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfF
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/samber/lo v1.27.0 h1:GOyDWxsblvqYobqsmUuMddPa2/mMzkKyojlXol4+LaQ=
github.com/samber/lo v1.27.0/go.mod h1:it33p9UtPMS7z72fP4gw/EIfQB2eI8ke7GR2wc6+Rhg=
+github.com/sandertv/go-formula/v2 v2.0.0-alpha.7 h1:j6ZnqcpnlGG9oBdhfiGgQ4aQAHEKsMlePvOfD+y5O6s=
+github.com/sandertv/go-formula/v2 v2.0.0-alpha.7/go.mod h1:Ag4V2fiOHWXct3SraXNN3dFzFtyu9vqBfrjfYWMGLhE=
github.com/sanity-io/litter v1.2.0/go.mod h1:JF6pZUFgu2Q0sBZ+HSV35P8TVPI1TTzEwyu9FXAw2W4=
github.com/santhosh-tekuri/jsonschema/v5 v5.0.0/go.mod h1:FKdcjfQW6rpZSnxxUvEA5H/cDPdvJ/SZJQLWWXWGrZ0=
github.com/sbinet/npyio v0.6.0 h1:IyqqQIzRjDym9xnIXsToCKei/qCzxDP+Y74KoMlMgXo=
@ -74,3 +74,10 @@ const (
	// SegmentIndexPath storage path const for segment index files.
	SegmentIndexPath = `index_files`
)
+
+const (
+	IndexParamsKey = "params"
+	IndexTypeKey   = "index_type"
+	MetricTypeKey  = "metric_type"
+	DimKey         = "dim"
+)
@ -33,7 +33,6 @@ import (

	"github.com/milvus-io/milvus/api/commonpb"
	"github.com/milvus-io/milvus/api/milvuspb"
-	pnc "github.com/milvus-io/milvus/internal/distributed/proxy/client"
	"github.com/milvus-io/milvus/internal/log"
	"github.com/milvus-io/milvus/internal/proto/internalpb"
	"github.com/milvus-io/milvus/internal/proto/proxypb"
@ -45,7 +44,6 @@ import (
	"github.com/milvus-io/milvus/internal/util/funcutil"
	"github.com/milvus-io/milvus/internal/util/logutil"
	"github.com/milvus-io/milvus/internal/util/paramtable"
-	"github.com/milvus-io/milvus/internal/util/sessionutil"
	"github.com/milvus-io/milvus/internal/util/trace"
	"github.com/milvus-io/milvus/internal/util/typeutil"

@ -177,21 +175,6 @@ func (s *Server) init() error {

	s.rootCoord.UpdateStateCode(internalpb.StateCode_Initializing)
	log.Debug("RootCoord", zap.Any("State", internalpb.StateCode_Initializing))
-	s.rootCoord.SetNewProxyClient(
-		func(se *sessionutil.Session) (types.Proxy, error) {
-			cli, err := pnc.NewClient(s.ctx, se.Address)
-			if err != nil {
-				return nil, err
-			}
-			if err := cli.Init(); err != nil {
-				return nil, err
-			}
-			if err := cli.Start(); err != nil {
-				return nil, err
-			}
-			return cli, nil
-		},
-	)

	if s.newDataCoordClient != nil {
		log.Debug("RootCoord start to create DataCoord client")
@ -85,9 +85,6 @@ func (m *mockCore) Stop() error {
	return fmt.Errorf("stop error")
}

-func (m *mockCore) SetNewProxyClient(func(sess *sessionutil.Session) (types.Proxy, error)) {
-}
-
type mockDataCoord struct {
	types.DataCoord
}
@ -848,11 +848,13 @@ func (i *IndexCoord) DescribeIndex(ctx context.Context, req *indexpb.DescribeInd
	indexInfos := make([]*indexpb.IndexInfo, 0)
	for _, index := range indexes {
		indexInfo := &indexpb.IndexInfo{
			CollectionID:    index.CollectionID,
			FieldID:         index.FieldID,
			IndexName:       index.IndexName,
			TypeParams:      index.TypeParams,
			IndexParams:     index.IndexParams,
+			IsAutoIndex:     index.IsAutoIndex,
+			UserIndexParams: index.UserIndexParams,
		}
		if err := i.completeIndexInfo(ctx, indexInfo); err != nil {
			log.Error("IndexCoord describe index fail", zap.Int64("collectionID", req.CollectionID),
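The describe path now returns the two new fields to callers. Below is a minimal sketch, not part of this commit, of how a client could read them from the returned indexpb.IndexInfo; it assumes the api/commonpb and internal/proto/indexpb packages imported elsewhere in this diff are available on the module path.

package main

import (
	"fmt"

	"github.com/milvus-io/milvus/api/commonpb"
	"github.com/milvus-io/milvus/internal/proto/indexpb"
)

// describeSummary is a hypothetical helper: it reports whether an index was
// built through autoindex and, if so, which user-level params were recorded.
func describeSummary(info *indexpb.IndexInfo) string {
	// GetIsAutoIndex and GetUserIndexParams are the generated getters added in this commit.
	if !info.GetIsAutoIndex() {
		return fmt.Sprintf("index %q: manually configured", info.GetIndexName())
	}
	params := make(map[string]string, len(info.GetUserIndexParams()))
	for _, kv := range info.GetUserIndexParams() {
		params[kv.GetKey()] = kv.GetValue()
	}
	return fmt.Sprintf("index %q: auto index, user params %v", info.GetIndexName(), params)
}

func main() {
	info := &indexpb.IndexInfo{
		IndexName:   "_default_idx",
		IsAutoIndex: true,
		UserIndexParams: []*commonpb.KeyValuePair{
			{Key: "metric_type", Value: "L2"},
		},
	}
	fmt.Println(describeSummary(info))
}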
@ -142,13 +142,15 @@ func (cit *CreateIndexTask) Execute(ctx context.Context) error {
		cit.indexID = indexID
	}
	index := &model.Index{
-		CollectionID: cit.req.CollectionID,
-		FieldID:      cit.req.FieldID,
-		IndexID:      cit.indexID,
-		IndexName:    cit.req.IndexName,
-		TypeParams:   cit.req.TypeParams,
-		IndexParams:  cit.req.IndexParams,
-		CreateTime:   cit.req.Timestamp,
+		CollectionID:    cit.req.GetCollectionID(),
+		FieldID:         cit.req.GetFieldID(),
+		IndexID:         cit.indexID,
+		IndexName:       cit.req.GetIndexName(),
+		TypeParams:      cit.req.GetTypeParams(),
+		IndexParams:     cit.req.GetIndexParams(),
+		CreateTime:      cit.req.GetTimestamp(),
+		IsAutoIndex:     cit.req.GetIsAutoIndex(),
+		UserIndexParams: cit.req.GetUserIndexParams(),
	}

	// Get flushed segments
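The literal above also switches from direct field reads (cit.req.CollectionID) to the generated GetXxx() accessors, which are nil-receiver safe; the generated code further down in this diff spells that out. A small standalone illustration, assuming the same indexpb package:

package main

import (
	"fmt"

	"github.com/milvus-io/milvus/internal/proto/indexpb"
)

func main() {
	// Generated getters tolerate a nil receiver, which is why the task builds
	// model.Index from req.GetXxx() instead of touching struct fields directly.
	var req *indexpb.CreateIndexRequest // deliberately nil

	// Direct field access (req.CollectionID) would panic here; the getters
	// simply return each field's zero value.
	fmt.Println(req.GetCollectionID())    // 0
	fmt.Println(req.GetIsAutoIndex())     // false
	fmt.Println(req.GetUserIndexParams()) // []
}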
@ -2,7 +2,6 @@ package indexnode

import (
	"context"
-	"encoding/json"
	"fmt"
	"math/rand"
	"sync"
@ -26,10 +25,6 @@ func TestIndexNodeSimple(t *testing.T) {
	assert.Nil(t, err)
	assert.Equal(t, state.Status.ErrorCode, commonpb.ErrorCode_Success)
	assert.Equal(t, state.State.StateCode, internalpb.StateCode_Healthy)
-	idxParams := map[string]string{
-		"nlist": "128",
-	}
-	idxParamsPayload, err := json.Marshal(idxParams)

	assert.Nil(t, err, err)
	var (
@ -49,10 +44,6 @@ func TestIndexNodeSimple(t *testing.T) {
			},
		}
		indexParams = []*commonpb.KeyValuePair{
-			{
-				Key:   "params",
-				Value: string(idxParamsPayload),
-			},
			{
				Key:   "metric_type",
				Value: "L2",
@ -61,6 +52,10 @@ func TestIndexNodeSimple(t *testing.T) {
				Key:   "index_type",
				Value: "IVF_FLAT",
			},
+			{
+				Key:   "nlist",
+				Value: "128",
+			},
		}
		mockChunkMgr = mockChunkMgrIns
	)
@ -160,11 +155,6 @@ type testTask struct {
}

func TestIndexNodeComplex(t *testing.T) {
-	idxParams := map[string]string{
-		"nlist": "128",
-	}
-	idxParamsPayload, err := json.Marshal(idxParams)
-	assert.Nil(t, err)
	var (
		clusterID string
		buildID0  int64
@ -190,8 +180,8 @@ func TestIndexNodeComplex(t *testing.T) {
		dims        = []int{8, 16, 32}
		indexParams = []*commonpb.KeyValuePair{
			{
-				Key:   "params",
-				Value: string(idxParamsPayload),
+				Key:   "nlist",
+				Value: "128",
			},
			{
				Key: "metric_type",
@ -33,7 +33,6 @@ import (
	"github.com/milvus-io/milvus/internal/metrics"
	"github.com/milvus-io/milvus/internal/proto/indexpb"
	"github.com/milvus-io/milvus/internal/storage"
-	"github.com/milvus-io/milvus/internal/util"
	"github.com/milvus-io/milvus/internal/util/funcutil"
	"github.com/milvus-io/milvus/internal/util/indexcgowrapper"
	"github.com/milvus-io/milvus/internal/util/indexparamcheck"
@ -149,42 +148,13 @@ func (it *indexBuildTask) Prepare(ctx context.Context) error {
	// type params can be removed
	for _, kvPair := range it.req.GetTypeParams() {
		key, value := kvPair.GetKey(), kvPair.GetValue()
-		_, ok := typeParams[key]
-		if ok {
-			return errors.New("duplicated key in type params")
-		}
-		if key == util.ParamsKeyToParse {
-			params, err := funcutil.ParseIndexParamsMap(value)
-			if err != nil {
-				return err
-			}
-			for pk, pv := range params {
-				typeParams[pk] = pv
-				indexParams[pk] = pv
-			}
-		} else {
-			typeParams[key] = value
-			indexParams[key] = value
-		}
+		typeParams[key] = value
+		indexParams[key] = value
	}

	for _, kvPair := range it.req.GetIndexParams() {
		key, value := kvPair.GetKey(), kvPair.GetValue()
-		_, ok := indexParams[key]
-		if ok {
-			return errors.New("duplicated key in index params")
-		}
-		if key == util.ParamsKeyToParse {
-			params, err := funcutil.ParseIndexParamsMap(value)
-			if err != nil {
-				return err
-			}
-			for pk, pv := range params {
-				indexParams[pk] = pv
-			}
-		} else {
-			indexParams[key] = value
-		}
+		indexParams[key] = value
	}
	it.newTypeParams = typeParams
	it.newIndexParams = indexParams
@ -199,18 +169,6 @@ func (it *indexBuildTask) Prepare(ctx context.Context) error {
		}
	}
	logutil.Logger(ctx).Info("Successfully prepare indexBuildTask", zap.Int64("buildID", it.BuildID), zap.Int64("Collection", it.collectionID), zap.Int64("SegmentIf", it.segmentID))
-	// setup chunkmanager
-	// opts := make([]storage.Option, 0)
-	// // TODO: secret access key_id
-	// opts = append(opts, storage.AccessKeyID(it.req.StorageAccessKey))
-	// opts = append(opts, storage.BucketName(it.req.BucketName))
-	// factory := storage.NewChunkManagerFactory("local", "minio", opts...)
-	// var err error
-	// it.cm, err = factory.NewVectorStorageChunkManager(ctx)
-	// if err != nil {
-	// 	log.Ctx(ctx).Error("init chunk manager failed", zap.Error(err), zap.String("BucketName", it.req.BucketName), zap.String("StorageAccessKey", it.req.StorageAccessKey))
-	// 	return err
-	// }
	return nil
}
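Prepare previously expanded a JSON blob stored under the "params" key (util.ParamsKeyToParse) into individual entries; after this change every key/value pair from the request is copied as-is, which is also why the indexnode tests above replace the marshalled "params" payload with a plain "nlist" pair. The sketch below shows the flattening that now has to happen before the request is sent; flattenParams is a hypothetical helper for illustration, not code from this commit.

package main

import (
	"encoding/json"
	"fmt"
)

// flattenParams mimics what the old Prepare() did: a value stored under the
// "params" key is itself a JSON object and gets expanded into individual
// key/value entries, everything else is copied through unchanged.
func flattenParams(kvs map[string]string) (map[string]string, error) {
	out := make(map[string]string, len(kvs))
	for k, v := range kvs {
		if k != "params" {
			out[k] = v
			continue
		}
		nested := make(map[string]string)
		if err := json.Unmarshal([]byte(v), &nested); err != nil {
			return nil, err
		}
		for nk, nv := range nested {
			out[nk] = nv
		}
	}
	return out, nil
}

func main() {
	// Old-style request: index params packed into a single JSON "params" value.
	packed := map[string]string{
		"index_type":  "IVF_FLAT",
		"metric_type": "L2",
		"params":      `{"nlist": "128"}`,
	}
	flat, err := flattenParams(packed)
	if err != nil {
		panic(err)
	}
	// After this commit the flat form (index_type, metric_type, nlist, ...)
	// is what indexnode expects to receive.
	fmt.Println(flat)
}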
@ -13,13 +13,15 @@ import (
func TestIndex_Get(t *testing.T) {
	var indexes = []*dbmodel.Index{
		{
			TenantID:        "",
			FieldID:         fieldID1,
			CollectionID:    collID1,
			IndexID:         indexID1,
			IndexName:       "test_index_1",
			IndexParams:     "",
			TypeParams:      "",
+			UserIndexParams: "",
+			IsAutoIndex:     false,
		},
	}

@ -51,14 +53,16 @@ func TestIndex_Get_Error(t *testing.T) {
func TestIndex_List(t *testing.T) {
	var indexResults = []*dbmodel.IndexResult{
		{
			FieldID:         fieldID1,
			CollectionID:    collID1,
			IndexID:         indexID1,
			IndexName:       "test_index_1",
			TypeParams:      "",
			IndexParams:     "",
+			UserIndexParams: "",
+			IsAutoIndex:     false,
			CreateTime:      uint64(1011),
			IsDeleted:       false,
		},
	}

@ -66,8 +70,8 @@ func TestIndex_List(t *testing.T) {
	mock.ExpectQuery("SELECT indexes.field_id AS field_id, indexes.collection_id AS collection_id, indexes.index_id AS index_id, indexes.index_name AS index_name, indexes.index_params AS index_params, indexes.type_params AS type_params, indexes.is_deleted AS is_deleted, indexes.create_time AS create_time FROM `indexes` WHERE indexes.tenant_id = ?").
		WithArgs(tenantID).
		WillReturnRows(
-			sqlmock.NewRows([]string{"field_id", "collection_id", "index_id", "index_name", "index_params", "type_params", "is_deleted", "create_time"}).
-				AddRow(indexResults[0].FieldID, indexResults[0].CollectionID, indexResults[0].IndexID, indexResults[0].IndexName, indexResults[0].IndexParams, indexResults[0].TypeParams, indexResults[0].IsDeleted, indexResults[0].CreateTime))
+			sqlmock.NewRows([]string{"field_id", "collection_id", "index_id", "index_name", "index_params", "type_params", "user_index_params", "is_auto_index", "is_deleted", "create_time"}).
+				AddRow(indexResults[0].FieldID, indexResults[0].CollectionID, indexResults[0].IndexID, indexResults[0].IndexName, indexResults[0].IndexParams, indexResults[0].TypeParams, indexResults[0].UserIndexParams, indexResults[0].IsAutoIndex, indexResults[0].IsDeleted, indexResults[0].CreateTime))

	// actual
	res, err := indexTestDb.List(tenantID)
@ -90,24 +94,26 @@ func TestIndex_List_Error(t *testing.T) {
func TestIndex_Insert(t *testing.T) {
	var indexes = []*dbmodel.Index{
		{
			TenantID:        tenantID,
			FieldID:         fieldID1,
			CollectionID:    collID1,
			IndexID:         indexID1,
			IndexName:       "test_index_1",
			IndexParams:     "",
			TypeParams:      "",
+			UserIndexParams: "",
+			IsAutoIndex:     false,
			CreateTime:      uint64(1011),
			IsDeleted:       false,
			CreatedAt:       time.Now(),
			UpdatedAt:       time.Now(),
		},
	}

	// expectation
	mock.ExpectBegin()
-	mock.ExpectExec("INSERT INTO `indexes` (`tenant_id`,`field_id`,`collection_id`,`index_id`,`index_name`,`index_params`,`type_params`,`create_time`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?)").
-		WithArgs(indexes[0].TenantID, indexes[0].FieldID, indexes[0].CollectionID, indexes[0].IndexID, indexes[0].IndexName, indexes[0].IndexParams, indexes[0].TypeParams, indexes[0].CreateTime, indexes[0].IsDeleted, indexes[0].CreatedAt, indexes[0].UpdatedAt).
+	mock.ExpectExec("INSERT INTO `indexes` (`tenant_id`,`field_id`,`collection_id`,`index_id`,`index_name`,`index_params`,`type_params`,`user_index_params`,`is_auto_index`,`create_time`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)").
+		WithArgs(indexes[0].TenantID, indexes[0].FieldID, indexes[0].CollectionID, indexes[0].IndexID, indexes[0].IndexName, indexes[0].IndexParams, indexes[0].TypeParams, indexes[0].UserIndexParams, indexes[0].IsAutoIndex, indexes[0].CreateTime, indexes[0].IsDeleted, indexes[0].CreatedAt, indexes[0].UpdatedAt).
		WillReturnResult(sqlmock.NewResult(1, 1))
	mock.ExpectCommit()

@ -134,8 +140,8 @@ func TestIndex_Insert_Error(t *testing.T) {

	// expectation
	mock.ExpectBegin()
-	mock.ExpectExec("INSERT INTO `indexes` (`tenant_id`,`field_id`,`collection_id`,`index_id`,`index_name`,`index_params`,`type_params`,`create_time`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?)").
-		WithArgs(indexes[0].TenantID, indexes[0].FieldID, indexes[0].CollectionID, indexes[0].IndexID, indexes[0].IndexName, indexes[0].IndexParams, indexes[0].TypeParams, indexes[0].CreateTime, indexes[0].IsDeleted, indexes[0].CreatedAt, indexes[0].UpdatedAt).
+	mock.ExpectExec("INSERT INTO `indexes` (`tenant_id`,`field_id`,`collection_id`,`index_id`,`index_name`,`index_params`,`type_params`, `user_index_params`, `is_auto_index`, `create_time`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)").
+		WithArgs(indexes[0].TenantID, indexes[0].FieldID, indexes[0].CollectionID, indexes[0].IndexID, indexes[0].IndexName, indexes[0].IndexParams, indexes[0].TypeParams, indexes[0].UserIndexParams, indexes[0].IsAutoIndex, indexes[0].CreateTime, indexes[0].IsDeleted, indexes[0].CreatedAt, indexes[0].UpdatedAt).
		WillReturnError(errors.New("test error"))
	mock.ExpectRollback()
@ -14,18 +14,20 @@ import (
)

type Index struct {
	ID              int64     `gorm:"id"`
	TenantID        string    `gorm:"tenant_id"`
	FieldID         int64     `gorm:"field_id"`
	CollectionID    int64     `gorm:"collection_id"`
	IndexID         int64     `gorm:"index_id"`
	IndexName       string    `gorm:"index_name"`
	IndexParams     string    `gorm:"index_params"`
	TypeParams      string    `gorm:"type_params"`
+	UserIndexParams string    `gorm:"user_index_params"`
+	IsAutoIndex     bool      `gorm:"is_auto_index"`
	CreateTime      uint64    `gorm:"create_time"`
	IsDeleted       bool      `gorm:"is_deleted"`
	CreatedAt       time.Time `gorm:"created_at"`
	UpdatedAt       time.Time `gorm:"updated_at"`
}

func (v Index) TableName() string {
@ -35,14 +37,16 @@ func (v Index) TableName() string {
// ------------- search result -------------

type IndexResult struct {
	FieldID         int64
	CollectionID    int64
	IndexID         int64
	IndexName       string
	TypeParams      string
	IndexParams     string
	CreateTime      uint64
	IsDeleted       bool
+	IsAutoIndex     bool
+	UserIndexParams string
}

//go:generate mockery --name=IIndexDb
@ -69,6 +73,15 @@ func UnmarshalIndexModel(inputs []*IndexResult) ([]*model.Index, error) {
				return nil, err
			}
		}
+		var userIndexParams []commonpb.KeyValuePair
+		if ir.UserIndexParams != "" {
+			err := json.Unmarshal([]byte(ir.UserIndexParams), &userIndexParams)
+			if err != nil {
+				log.Error("unmarshal UserIndexParams of index failed", zap.Int64("collID", ir.CollectionID),
+					zap.Int64("indexID", ir.IndexID), zap.String("indexName", ir.IndexName), zap.Error(err))
+				return nil, err
+			}
+		}

		var typeParams []commonpb.KeyValuePair
		if ir.TypeParams != "" {
@ -81,14 +94,16 @@ func UnmarshalIndexModel(inputs []*IndexResult) ([]*model.Index, error) {
		}

		idx := &model.Index{
			CollectionID:    ir.CollectionID,
			FieldID:         ir.FieldID,
			IndexID:         ir.IndexID,
			IndexName:       ir.IndexName,
			IndexParams:     funcutil.ConvertToKeyValuePairPointer(indexParams),
			TypeParams:      funcutil.ConvertToKeyValuePairPointer(typeParams),
			CreateTime:      ir.CreateTime,
			IsDeleted:       ir.IsDeleted,
+			IsAutoIndex:     ir.IsAutoIndex,
+			UserIndexParams: funcutil.ConvertToKeyValuePairPointer(userIndexParams),
		}
		result = append(result, idx)
	}
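On the relational metadata path the new user_index_params value travels as a JSON string column: the Catalog code below writes it with json.Marshal and UnmarshalIndexModel above reads it back into []commonpb.KeyValuePair. A minimal round-trip sketch, assuming the api/commonpb package is importable:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/milvus-io/milvus/api/commonpb"
)

func main() {
	// What the catalog stores in the `user_index_params` column.
	in := []*commonpb.KeyValuePair{
		{Key: "metric_type", Value: "L2"},
		{Key: "nlist", Value: "128"},
	}
	blob, err := json.Marshal(in)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(blob)) // persisted as a plain string column

	// What UnmarshalIndexModel does on the way back out.
	var out []commonpb.KeyValuePair
	if err := json.Unmarshal(blob, &out); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", out)
}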
@ -36,6 +36,14 @@ func (tc *Catalog) CreateIndex(ctx context.Context, index *model.Index) error {
		return err
	}

+	userIndexParamsBytes, err := json.Marshal(index.UserIndexParams)
+	if err != nil {
+		log.Error("marshal userIndexParams of index failed", zap.String("tenant", tenantID),
+			zap.Int64("collID", index.CollectionID), zap.Int64("indexID", index.IndexID),
+			zap.String("indexName", index.IndexName), zap.Error(err))
+		return err
+	}
+
	typeParamsBytes, err := json.Marshal(index.TypeParams)
	if err != nil {
		log.Error("marshal TypeParams of index failed", zap.String("tenant", tenantID),
@ -45,15 +53,17 @@ func (tc *Catalog) CreateIndex(ctx context.Context, index *model.Index) error {
	}

	idx := &dbmodel.Index{
		TenantID:        tenantID,
		CollectionID:    index.CollectionID,
		FieldID:         index.FieldID,
		IndexID:         index.IndexID,
		IndexName:       index.IndexName,
		TypeParams:      string(typeParamsBytes),
		IndexParams:     string(indexParamsBytes),
		CreateTime:      index.CreateTime,
		IsDeleted:       index.IsDeleted,
+		IsAutoIndex:     index.IsAutoIndex,
+		UserIndexParams: string(userIndexParamsBytes),
	}

	err = tc.metaDomain.IndexDb(ctx).Insert([]*dbmodel.Index{idx})
@ -92,6 +102,14 @@ func (tc *Catalog) AlterIndex(ctx context.Context, index *model.Index) error {
		return err
	}

+	userIndexParamsBytes, err := json.Marshal(index.UserIndexParams)
+	if err != nil {
+		log.Error("marshal userIndexParams of index failed", zap.String("tenant", tenantID),
+			zap.Int64("collID", index.CollectionID), zap.Int64("indexID", index.IndexID),
+			zap.String("indexName", index.IndexName), zap.Error(err))
+		return err
+	}
+
	typeParamsBytes, err := json.Marshal(index.TypeParams)
	if err != nil {
		log.Error("marshal TypeParams of index failed", zap.String("tenant", tenantID),
@ -101,15 +119,17 @@ func (tc *Catalog) AlterIndex(ctx context.Context, index *model.Index) error {
	}

	idx := &dbmodel.Index{
		TenantID:        tenantID,
		CollectionID:    index.CollectionID,
		FieldID:         index.FieldID,
		IndexID:         index.IndexID,
		IndexName:       index.IndexName,
		TypeParams:      string(typeParamsBytes),
		IndexParams:     string(indexParamsBytes),
		CreateTime:      index.CreateTime,
		IsDeleted:       index.IsDeleted,
+		IsAutoIndex:     index.IsAutoIndex,
+		UserIndexParams: string(userIndexParamsBytes),
	}
	err = tc.metaDomain.IndexDb(ctx).Update(idx)
	if err != nil {
@ -131,6 +151,13 @@ func (tc *Catalog) AlterIndexes(ctx context.Context, indexes []*model.Index) err
		return err
	}

+	userIndexParamsBytes, err := json.Marshal(index.UserIndexParams)
+	if err != nil {
+		log.Error("marshal userIndexParams of index failed", zap.String("tenant", tenantID),
+			zap.Int64("collID", index.CollectionID), zap.Int64("indexID", index.IndexID),
+			zap.String("indexName", index.IndexName), zap.Error(err))
+		return err
+	}
	typeParamsBytes, err := json.Marshal(index.TypeParams)
	if err != nil {
		log.Error("marshal TypeParams of index failed", zap.String("tenant", tenantID),
@ -140,15 +167,17 @@ func (tc *Catalog) AlterIndexes(ctx context.Context, indexes []*model.Index) err
	}

	idx := &dbmodel.Index{
		TenantID:        tenantID,
		CollectionID:    index.CollectionID,
		FieldID:         index.FieldID,
		IndexID:         index.IndexID,
		IndexName:       index.IndexName,
		TypeParams:      string(typeParamsBytes),
		IndexParams:     string(indexParamsBytes),
		CreateTime:      index.CreateTime,
		IsDeleted:       index.IsDeleted,
+		IsAutoIndex:     index.IsAutoIndex,
+		UserIndexParams: string(userIndexParamsBytes),
	}
	err = tc.metaDomain.IndexDb(ctx).Update(idx)
	if err != nil {
@ -7,15 +7,17 @@ import (
)

type Index struct {
	TenantID        string
	CollectionID    int64
	FieldID         int64
	IndexID         int64
	IndexName       string
	IsDeleted       bool
	CreateTime      uint64
	TypeParams      []*commonpb.KeyValuePair
	IndexParams     []*commonpb.KeyValuePair
+	IsAutoIndex     bool
+	UserIndexParams []*commonpb.KeyValuePair
}

func UnmarshalIndexModel(indexInfo *indexpb.FieldIndex) *Index {
@ -24,14 +26,16 @@ func UnmarshalIndexModel(indexInfo *indexpb.FieldIndex) *Index {
	}

	return &Index{
		CollectionID:    indexInfo.IndexInfo.GetCollectionID(),
		FieldID:         indexInfo.IndexInfo.GetFieldID(),
		IndexID:         indexInfo.IndexInfo.GetIndexID(),
		IndexName:       indexInfo.IndexInfo.GetIndexName(),
		IsDeleted:       indexInfo.GetDeleted(),
		CreateTime:      indexInfo.CreateTime,
		TypeParams:      indexInfo.IndexInfo.GetTypeParams(),
		IndexParams:     indexInfo.IndexInfo.GetIndexParams(),
+		IsAutoIndex:     indexInfo.IndexInfo.GetIsAutoIndex(),
+		UserIndexParams: indexInfo.IndexInfo.GetUserIndexParams(),
	}
}
@ -42,12 +46,14 @@ func MarshalIndexModel(index *Index) *indexpb.FieldIndex {

	return &indexpb.FieldIndex{
		IndexInfo: &indexpb.IndexInfo{
			CollectionID:    index.CollectionID,
			FieldID:         index.FieldID,
			IndexName:       index.IndexName,
			IndexID:         index.IndexID,
			TypeParams:      index.TypeParams,
			IndexParams:     index.IndexParams,
+			IsAutoIndex:     index.IsAutoIndex,
+			UserIndexParams: index.UserIndexParams,
		},
		Deleted:    index.IsDeleted,
		CreateTime: index.CreateTime,
@ -106,15 +112,17 @@ func MarshalIndexModel(index *Index) *indexpb.FieldIndex {

func CloneIndex(index *Index) *Index {
	clonedIndex := &Index{
		TenantID:        index.TenantID,
		CollectionID:    index.CollectionID,
		FieldID:         index.FieldID,
		IndexID:         index.IndexID,
		IndexName:       index.IndexName,
		IsDeleted:       index.IsDeleted,
		CreateTime:      index.CreateTime,
		TypeParams:      make([]*commonpb.KeyValuePair, len(index.TypeParams)),
		IndexParams:     make([]*commonpb.KeyValuePair, len(index.IndexParams)),
+		IsAutoIndex:     index.IsAutoIndex,
+		UserIndexParams: make([]*commonpb.KeyValuePair, len(index.UserIndexParams)),
	}
	for i, param := range index.TypeParams {
		clonedIndex.TypeParams[i] = proto.Clone(param).(*commonpb.KeyValuePair)
|
||||
for i, param := range index.IndexParams {
|
||||
clonedIndex.IndexParams[i] = proto.Clone(param).(*commonpb.KeyValuePair)
|
||||
}
|
||||
for i, param := range index.UserIndexParams {
|
||||
clonedIndex.UserIndexParams[i] = proto.Clone(param).(*commonpb.KeyValuePair)
|
||||
}
|
||||
return clonedIndex
|
||||
}
|
||||
|
||||
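CloneIndex deep-copies the new slice the same way it already handled TypeParams and IndexParams: one proto.Clone per element, so the clone never aliases the caller's pairs. A small standalone illustration of why the per-element clone matters; it assumes github.com/golang/protobuf/proto, which matches the proto.Clone call shown above.

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	"github.com/milvus-io/milvus/api/commonpb"
)

func main() {
	src := []*commonpb.KeyValuePair{{Key: "nlist", Value: "128"}}

	// Deep copy, element by element, exactly like CloneIndex does for
	// TypeParams, IndexParams and now UserIndexParams.
	cloned := make([]*commonpb.KeyValuePair, len(src))
	for i, kv := range src {
		cloned[i] = proto.Clone(kv).(*commonpb.KeyValuePair)
	}

	cloned[0].Value = "256"
	fmt.Println(src[0].Value, cloned[0].Value) // 128 256: the source is untouched
}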
@ -52,6 +52,8 @@ message IndexInfo {
  // index state
  common.IndexState state = 9;
  string index_state_fail_reason = 10;
+  bool is_auto_index = 11;
+  repeated common.KeyValuePair user_index_params = 12;
}

message FieldIndex {
@ -124,6 +126,8 @@ message CreateIndexRequest {
  repeated common.KeyValuePair type_params = 4;
  repeated common.KeyValuePair index_params = 5;
  uint64 timestamp = 6;
+  bool is_auto_index = 7;
+  repeated common.KeyValuePair user_index_params = 8;
}

message GetIndexInfoRequest {
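With field 7 (is_auto_index) and field 8 (user_index_params) on CreateIndexRequest, a request can carry both the flattened parameters the index builder consumes and the original user-supplied ones for later display. A hedged sketch of building such a request with the generated types shown below; the IDs and parameter values are illustrative only.

package main

import (
	"fmt"

	"github.com/milvus-io/milvus/api/commonpb"
	"github.com/milvus-io/milvus/internal/proto/indexpb"
)

func main() {
	// Illustrative only: IDs and parameter values are made up for the example.
	req := &indexpb.CreateIndexRequest{
		CollectionID: 1,
		FieldID:      100,
		IndexName:    "_default_idx",
		IndexParams: []*commonpb.KeyValuePair{
			{Key: "index_type", Value: "IVF_FLAT"},
			{Key: "metric_type", Value: "L2"},
			{Key: "nlist", Value: "128"},
		},
		// New in this change: record that the index was chosen automatically
		// and keep the parameters the user actually supplied.
		IsAutoIndex: true,
		UserIndexParams: []*commonpb.KeyValuePair{
			{Key: "metric_type", Value: "L2"},
		},
	}
	fmt.Println(req.GetIsAutoIndex(), len(req.GetUserIndexParams()))
}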
@ -38,11 +38,13 @@ type IndexInfo struct {
	IndexedRows int64 `protobuf:"varint,7,opt,name=indexed_rows,json=indexedRows,proto3" json:"indexed_rows,omitempty"`
	TotalRows   int64 `protobuf:"varint,8,opt,name=total_rows,json=totalRows,proto3" json:"total_rows,omitempty"`
	// index state
	State                commonpb.IndexState      `protobuf:"varint,9,opt,name=state,proto3,enum=milvus.proto.common.IndexState" json:"state,omitempty"`
	IndexStateFailReason string                   `protobuf:"bytes,10,opt,name=index_state_fail_reason,json=indexStateFailReason,proto3" json:"index_state_fail_reason,omitempty"`
+	IsAutoIndex          bool                     `protobuf:"varint,11,opt,name=is_auto_index,json=isAutoIndex,proto3" json:"is_auto_index,omitempty"`
+	UserIndexParams      []*commonpb.KeyValuePair `protobuf:"bytes,12,rep,name=user_index_params,json=userIndexParams,proto3" json:"user_index_params,omitempty"`
	XXX_NoUnkeyedLiteral struct{}                 `json:"-"`
	XXX_unrecognized     []byte                   `json:"-"`
	XXX_sizecache        int32                    `json:"-"`
}

func (m *IndexInfo) Reset() { *m = IndexInfo{} }
@ -140,6 +142,20 @@ func (m *IndexInfo) GetIndexStateFailReason() string {
	return ""
}

+func (m *IndexInfo) GetIsAutoIndex() bool {
+	if m != nil {
+		return m.IsAutoIndex
+	}
+	return false
+}
+
+func (m *IndexInfo) GetUserIndexParams() []*commonpb.KeyValuePair {
+	if m != nil {
+		return m.UserIndexParams
+	}
+	return nil
+}
+
type FieldIndex struct {
	IndexInfo *IndexInfo `protobuf:"bytes,1,opt,name=index_info,json=indexInfo,proto3" json:"index_info,omitempty"`
	Deleted   bool       `protobuf:"varint,2,opt,name=deleted,proto3" json:"deleted,omitempty"`
@ -714,6 +730,8 @@ type CreateIndexRequest struct {
	TypeParams      []*commonpb.KeyValuePair `protobuf:"bytes,4,rep,name=type_params,json=typeParams,proto3" json:"type_params,omitempty"`
	IndexParams     []*commonpb.KeyValuePair `protobuf:"bytes,5,rep,name=index_params,json=indexParams,proto3" json:"index_params,omitempty"`
	Timestamp       uint64                   `protobuf:"varint,6,opt,name=timestamp,proto3" json:"timestamp,omitempty"`
+	IsAutoIndex     bool                     `protobuf:"varint,7,opt,name=is_auto_index,json=isAutoIndex,proto3" json:"is_auto_index,omitempty"`
+	UserIndexParams []*commonpb.KeyValuePair `protobuf:"bytes,8,rep,name=user_index_params,json=userIndexParams,proto3" json:"user_index_params,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
@ -786,6 +804,20 @@ func (m *CreateIndexRequest) GetTimestamp() uint64 {
	return 0
}

+func (m *CreateIndexRequest) GetIsAutoIndex() bool {
+	if m != nil {
+		return m.IsAutoIndex
+	}
+	return false
+}
+
+func (m *CreateIndexRequest) GetUserIndexParams() []*commonpb.KeyValuePair {
+	if m != nil {
+		return m.UserIndexParams
+	}
+	return nil
+}
+
type GetIndexInfoRequest struct {
	CollectionID int64   `protobuf:"varint,1,opt,name=collectionID,proto3" json:"collectionID,omitempty"`
	SegmentIDs   []int64 `protobuf:"varint,2,rep,packed,name=segmentIDs,proto3" json:"segmentIDs,omitempty"`
@ -1980,138 +2012,141 @@ func init() {
func init() { proto.RegisterFile("index_coord.proto", fileDescriptor_f9e019eb3fda53c2) }

var fileDescriptor_f9e019eb3fda53c2 = []byte{
	// regenerated gzipped FileDescriptorProto: 2089 bytes before this change, 2134 bytes after
	// (raw generated byte literal omitted)
}
|
||||
0xa3, 0x21, 0x7a, 0x0f, 0x6e, 0x91, 0xc0, 0x1d, 0x69, 0x32, 0x4c, 0x34, 0x95, 0x95, 0x56, 0x49,
|
||||
0x10, 0xd3, 0xa4, 0x56, 0x7f, 0x04, 0x2b, 0x38, 0x78, 0x19, 0xe1, 0x08, 0x27, 0xaa, 0xca, 0x66,
|
||||
0x75, 0x2d, 0xd6, 0x7a, 0x82, 0xf4, 0x3c, 0x76, 0xea, 0xb2, 0x01, 0xe5, 0x4c, 0x27, 0x43, 0x53,
|
||||
0x48, 0x3a, 0x42, 0x80, 0x3e, 0x04, 0x53, 0x2c, 0x57, 0xd0, 0x52, 0xa5, 0xfd, 0xed, 0x22, 0x68,
|
||||
0x69, 0x7f, 0x3b, 0xd5, 0x2f, 0xd4, 0x0f, 0x26, 0x02, 0x44, 0xd7, 0xc1, 0x3e, 0x61, 0xa7, 0x9a,
|
||||
0x62, 0x40, 0x89, 0xf6, 0x08, 0x3b, 0xdd, 0xfa, 0xda, 0x04, 0x90, 0x88, 0xdc, 0xa5, 0x34, 0xf4,
|
||||
0xd1, 0x48, 0x9a, 0x79, 0x97, 0x0e, 0x47, 0x34, 0xc0, 0x01, 0x97, 0x61, 0xcb, 0xd0, 0x93, 0x29,
|
||||
0x1f, 0x77, 0x26, 0x55, 0xb5, 0x63, 0x9a, 0x8f, 0xa6, 0xac, 0xc8, 0xa9, 0xdb, 0x37, 0xd0, 0x4b,
|
||||
0x59, 0x3d, 0x8b, 0x21, 0x61, 0x9c, 0xf4, 0xd8, 0xee, 0x89, 0x17, 0x04, 0x78, 0x80, 0xb6, 0xa6,
|
||||
0x9f, 0x39, 0xa1, 0x1c, 0x9f, 0xfa, 0x20, 0xbb, 0x46, 0x0f, 0x3a, 0x3c, 0x24, 0xc1, 0x71, 0x8c,
|
||||
0x0d, 0xfb, 0x06, 0x7a, 0x01, 0x56, 0xaa, 0xab, 0x47, 0x8f, 0x8a, 0x4c, 0x39, 0xd9, 0xf6, 0x37,
|
||||
0xcf, 0x03, 0x91, 0x7d, 0x03, 0xf5, 0xa1, 0x9e, 0xf9, 0xec, 0x84, 0x5a, 0xe7, 0x15, 0xed, 0xe9,
|
||||
0x6f, 0x3d, 0xcd, 0xef, 0xcd, 0xa1, 0x99, 0xdc, 0xfe, 0xd7, 0xca, 0x60, 0x13, 0xdf, 0x6d, 0x36,
|
||||
0xa7, 0x6c, 0x32, 0xed, 0x0b, 0x53, 0xf3, 0xc9, 0xfc, 0x0b, 0x92, 0xc3, 0xfd, 0xf1, 0x23, 0x15,
|
||||
0xc0, 0x1e, 0xcf, 0xee, 0x4c, 0xd4, 0x69, 0xad, 0x79, 0x5b, 0x18, 0xfb, 0x06, 0x3a, 0x02, 0x33,
|
||||
0xe9, 0x22, 0xd0, 0x77, 0x8b, 0x16, 0xe6, 0x9b, 0x8c, 0x39, 0x9c, 0x93, 0x29, 0xe0, 0x8b, 0x9d,
|
||||
0x53, 0xd4, 0x3f, 0x14, 0x3b, 0xa7, 0xb0, 0x1b, 0xb0, 0x6f, 0xa0, 0xdf, 0x8c, 0xbf, 0x3d, 0x66,
|
||||
0xca, 0x66, 0xf4, 0xe4, 0xbc, 0xe7, 0x17, 0x55, 0xf1, 0xcd, 0x1f, 0x5c, 0x60, 0x45, 0x0a, 0x1c,
|
||||
0xa8, 0x73, 0x42, 0x5f, 0xab, 0xf2, 0x25, 0x0a, 0x3d, 0x51, 0xed, 0x4f, 0x8f, 0xdf, 0x49, 0xd5,
|
||||
0xa9, 0x87, 0x9f, 0xb3, 0x22, 0x39, 0xdc, 0x05, 0x78, 0x86, 0xf9, 0x01, 0xe6, 0x21, 0xe9, 0xb1,
|
||||
0x7c, 0x58, 0xe9, 0xc1, 0x58, 0x21, 0x3e, 0xea, 0xf1, 0x4c, 0xbd, 0xf8, 0x80, 0xad, 0xbf, 0x2d,
|
||||
0xea, 0xff, 0x09, 0x0f, 0xa9, 0x8f, 0xbf, 0x19, 0xb9, 0xea, 0x08, 0xcc, 0xa4, 0x80, 0x2f, 0x0e,
|
||||
0x85, 0x7c, 0x7d, 0x3f, 0x2b, 0x14, 0x3e, 0x07, 0x33, 0xa9, 0x88, 0x8a, 0x77, 0xcc, 0x57, 0x93,
|
||||
0xcd, 0x87, 0x33, 0xb4, 0x92, 0xdb, 0x1e, 0x42, 0x35, 0xae, 0x60, 0xd0, 0x83, 0x69, 0x71, 0x9b,
|
||||
0xde, 0x79, 0xc6, 0x5d, 0x7f, 0x05, 0x56, 0x8a, 0xde, 0x8b, 0x33, 0xf5, 0x64, 0x59, 0xd0, 0x7c,
|
||||
0x3c, 0x53, 0xef, 0x9b, 0x11, 0x30, 0x3b, 0x3f, 0xfc, 0x7c, 0xeb, 0x98, 0xf0, 0x93, 0xa8, 0x2b,
|
||||
0x2c, 0xbb, 0xa9, 0x34, 0xdf, 0x23, 0x54, 0xff, 0xda, 0x8c, 0x6f, 0xb9, 0x29, 0x77, 0xda, 0x94,
|
||||
0x76, 0x1a, 0x75, 0xbb, 0x8b, 0x72, 0xf8, 0xfe, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 0x8f, 0x26,
|
||||
0xb1, 0x27, 0xea, 0x1f, 0x00, 0x00,
|
||||
}
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
|
||||
@ -1852,11 +1852,11 @@ func (node *Proxy) CreateIndex(ctx context.Context, request *milvuspb.CreateInde
traceID, _, _ := trace.InfoFromSpan(sp)

cit := &createIndexTask{
ctx:                ctx,
Condition:          NewTaskCondition(ctx),
CreateIndexRequest: request,
rootCoord:          node.rootCoord,
indexCoord:         node.indexCoord,
ctx:        ctx,
Condition:  NewTaskCondition(ctx),
req:        request,
rootCoord:  node.rootCoord,
indexCoord: node.indexCoord,
}

method := "CreateIndex"
@ -1877,10 +1877,10 @@ func (node *Proxy) CreateIndex(ctx context.Context, request *milvuspb.CreateInde
zap.Error(err),
zap.String("traceID", traceID),
zap.String("role", typeutil.ProxyRole),
zap.String("db", request.DbName),
zap.String("collection", request.CollectionName),
zap.String("field", request.FieldName),
zap.Any("extra_params", request.ExtraParams))
zap.String("db", request.GetDbName()),
zap.String("collection", request.GetCollectionName()),
zap.String("field", request.GetFieldName()),
zap.Any("extra_params", request.GetExtraParams()))

metrics.ProxyDMLFunctionCall.WithLabelValues(strconv.FormatInt(Params.ProxyCfg.GetNodeID(), 10), method,
metrics.AbandonLabel).Inc()

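The logging lines above switch from direct field access (request.DbName) to the generated Get accessors. A common reason for this style is that protoc-gen-go getters are safe to call on a nil receiver. A minimal illustration with a hypothetical stand-in type (not the real milvuspb message):

package main

import "fmt"

// createIndexRequest is a tiny stand-in for a generated protobuf message,
// used only to illustrate why the Get accessors are preferred.
type createIndexRequest struct {
	DbName string
}

// GetDbName mimics a protoc-gen-go getter: it is safe to call on a nil receiver.
func (r *createIndexRequest) GetDbName() string {
	if r == nil {
		return ""
	}
	return r.DbName
}

func main() {
	var req *createIndexRequest // nil pointer
	fmt.Println(req.GetDbName() == "") // prints true; req.DbName would panic
}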
@ -702,7 +702,7 @@ func TestProxy(t *testing.T) {
Value: strconv.Itoa(dim),
},
{
Key:   MetricTypeKey,
Key:   common.MetricTypeKey,
Value: distance.L2,
},
{
@ -1538,7 +1538,7 @@ func TestProxy(t *testing.T) {
OpRight: opRight,
Params: []*commonpb.KeyValuePair{
{
Key:   MetricTypeKey,
Key:   common.MetricTypeKey,
Value: distance.L2,
},
},

@ -22,9 +22,6 @@ import (
|
||||
"fmt"
|
||||
"math"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/milvus-io/milvus/internal/proto/indexpb"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
"go.uber.org/zap"
|
||||
@ -43,8 +40,6 @@ import (
|
||||
"github.com/milvus-io/milvus/internal/proto/planpb"
|
||||
"github.com/milvus-io/milvus/internal/proto/querypb"
|
||||
|
||||
"github.com/milvus-io/milvus/internal/util/funcutil"
|
||||
"github.com/milvus-io/milvus/internal/util/indexparamcheck"
|
||||
"github.com/milvus-io/milvus/internal/util/timerecord"
|
||||
"github.com/milvus-io/milvus/internal/util/trace"
|
||||
"github.com/milvus-io/milvus/internal/util/typeutil"
|
||||
@ -59,35 +54,25 @@ const (
|
||||
OffsetKey = "offset"
|
||||
LimitKey = "limit"
|
||||
|
||||
InsertTaskName = "InsertTask"
|
||||
CreateCollectionTaskName = "CreateCollectionTask"
|
||||
DropCollectionTaskName = "DropCollectionTask"
|
||||
SearchTaskName = "SearchTask"
|
||||
RetrieveTaskName = "RetrieveTask"
|
||||
QueryTaskName = "QueryTask"
|
||||
HasCollectionTaskName = "HasCollectionTask"
|
||||
DescribeCollectionTaskName = "DescribeCollectionTask"
|
||||
GetCollectionStatisticsTaskName = "GetCollectionStatisticsTask"
|
||||
GetPartitionStatisticsTaskName = "GetPartitionStatisticsTask"
|
||||
ShowCollectionTaskName = "ShowCollectionTask"
|
||||
CreatePartitionTaskName = "CreatePartitionTask"
|
||||
DropPartitionTaskName = "DropPartitionTask"
|
||||
HasPartitionTaskName = "HasPartitionTask"
|
||||
ShowPartitionTaskName = "ShowPartitionTask"
|
||||
CreateIndexTaskName = "CreateIndexTask"
|
||||
DescribeIndexTaskName = "DescribeIndexTask"
|
||||
DropIndexTaskName = "DropIndexTask"
|
||||
GetIndexStateTaskName = "GetIndexStateTask"
|
||||
GetIndexBuildProgressTaskName = "GetIndexBuildProgressTask"
|
||||
FlushTaskName = "FlushTask"
|
||||
LoadCollectionTaskName = "LoadCollectionTask"
|
||||
ReleaseCollectionTaskName = "ReleaseCollectionTask"
|
||||
LoadPartitionTaskName = "LoadPartitionsTask"
|
||||
ReleasePartitionTaskName = "ReleasePartitionsTask"
|
||||
deleteTaskName = "DeleteTask"
|
||||
CreateAliasTaskName = "CreateAliasTask"
|
||||
DropAliasTaskName = "DropAliasTask"
|
||||
AlterAliasTaskName = "AlterAliasTask"
|
||||
InsertTaskName = "InsertTask"
|
||||
CreateCollectionTaskName = "CreateCollectionTask"
|
||||
DropCollectionTaskName = "DropCollectionTask"
|
||||
HasCollectionTaskName = "HasCollectionTask"
|
||||
DescribeCollectionTaskName = "DescribeCollectionTask"
|
||||
ShowCollectionTaskName = "ShowCollectionTask"
|
||||
CreatePartitionTaskName = "CreatePartitionTask"
|
||||
DropPartitionTaskName = "DropPartitionTask"
|
||||
HasPartitionTaskName = "HasPartitionTask"
|
||||
ShowPartitionTaskName = "ShowPartitionTask"
|
||||
FlushTaskName = "FlushTask"
|
||||
LoadCollectionTaskName = "LoadCollectionTask"
|
||||
ReleaseCollectionTaskName = "ReleaseCollectionTask"
|
||||
LoadPartitionTaskName = "LoadPartitionsTask"
|
||||
ReleasePartitionTaskName = "ReleasePartitionsTask"
|
||||
deleteTaskName = "DeleteTask"
|
||||
CreateAliasTaskName = "CreateAliasTask"
|
||||
DropAliasTaskName = "DropAliasTask"
|
||||
AlterAliasTaskName = "AlterAliasTask"
|
||||
|
||||
// minFloat32 minimum float.
|
||||
minFloat32 = -1 * float32(math.MaxFloat32)
|
||||
@ -326,58 +311,6 @@ func (dct *dropCollectionTask) PostExecute(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Support wildcard in output fields:
|
||||
// "*" - all scalar fields
|
||||
// "%" - all vector fields
|
||||
// For example, A and B are scalar fields, C and D are vector fields, duplicated fields will automatically be removed.
|
||||
// output_fields=["*"] ==> [A,B]
|
||||
// output_fields=["%"] ==> [C,D]
|
||||
// output_fields=["*","%"] ==> [A,B,C,D]
|
||||
// output_fields=["*",A] ==> [A,B]
|
||||
// output_fields=["*",C] ==> [A,B,C]
|
||||
func translateOutputFields(outputFields []string, schema *schemapb.CollectionSchema, addPrimary bool) ([]string, error) {
|
||||
var primaryFieldName string
|
||||
scalarFieldNameMap := make(map[string]bool)
|
||||
vectorFieldNameMap := make(map[string]bool)
|
||||
resultFieldNameMap := make(map[string]bool)
|
||||
resultFieldNames := make([]string, 0)
|
||||
|
||||
for _, field := range schema.Fields {
|
||||
if field.IsPrimaryKey {
|
||||
primaryFieldName = field.Name
|
||||
}
|
||||
if field.DataType == schemapb.DataType_BinaryVector || field.DataType == schemapb.DataType_FloatVector {
|
||||
vectorFieldNameMap[field.Name] = true
|
||||
} else {
|
||||
scalarFieldNameMap[field.Name] = true
|
||||
}
|
||||
}
|
||||
|
||||
for _, outputFieldName := range outputFields {
|
||||
outputFieldName = strings.TrimSpace(outputFieldName)
|
||||
if outputFieldName == "*" {
|
||||
for fieldName := range scalarFieldNameMap {
|
||||
resultFieldNameMap[fieldName] = true
|
||||
}
|
||||
} else if outputFieldName == "%" {
|
||||
for fieldName := range vectorFieldNameMap {
|
||||
resultFieldNameMap[fieldName] = true
|
||||
}
|
||||
} else {
|
||||
resultFieldNameMap[outputFieldName] = true
|
||||
}
|
||||
}
|
||||
|
||||
if addPrimary {
|
||||
resultFieldNameMap[primaryFieldName] = true
|
||||
}
|
||||
|
||||
for fieldName := range resultFieldNameMap {
|
||||
resultFieldNames = append(resultFieldNames, fieldName)
|
||||
}
|
||||
return resultFieldNames, nil
|
||||
}
|
||||
|
||||
type hasCollectionTask struct {
|
||||
Condition
|
||||
*milvuspb.HasCollectionRequest
|
||||
@ -1125,611 +1058,6 @@ func (spt *showPartitionsTask) PostExecute(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
type createIndexTask struct {
|
||||
Condition
|
||||
*milvuspb.CreateIndexRequest
|
||||
ctx context.Context
|
||||
rootCoord types.RootCoord
|
||||
indexCoord types.IndexCoord
|
||||
result *commonpb.Status
|
||||
|
||||
collectionID UniqueID
|
||||
fieldSchema *schemapb.FieldSchema
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) TraceCtx() context.Context {
|
||||
return cit.ctx
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) ID() UniqueID {
|
||||
return cit.Base.MsgID
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) SetID(uid UniqueID) {
|
||||
cit.Base.MsgID = uid
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) Name() string {
|
||||
return CreateIndexTaskName
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) Type() commonpb.MsgType {
|
||||
return cit.Base.MsgType
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) BeginTs() Timestamp {
|
||||
return cit.Base.Timestamp
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) EndTs() Timestamp {
|
||||
return cit.Base.Timestamp
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) SetTs(ts Timestamp) {
|
||||
cit.Base.Timestamp = ts
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) OnEnqueue() error {
|
||||
cit.Base = &commonpb.MsgBase{}
|
||||
return nil
|
||||
}
|
||||
|
||||
func parseIndexParams(m []*commonpb.KeyValuePair) (map[string]string, error) {
|
||||
indexParams := make(map[string]string)
|
||||
for _, kv := range m {
|
||||
if kv.Key == "params" { // TODO(dragondriver): change `params` to const variable
|
||||
params, err := funcutil.ParseIndexParamsMap(kv.Value)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for k, v := range params {
|
||||
indexParams[k] = v
|
||||
}
|
||||
} else {
|
||||
indexParams[kv.Key] = kv.Value
|
||||
}
|
||||
}
|
||||
_, exist := indexParams["index_type"] // TODO(dragondriver): change `index_type` to const variable
|
||||
if !exist {
|
||||
//return nil, errors.New("there is no index_type in index params")
|
||||
indexParams["index_type"] = indexparamcheck.IndexFaissIvfPQ // IVF_PQ is the default index type
|
||||
}
|
||||
return indexParams, nil
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) getIndexedField(ctx context.Context) (*schemapb.FieldSchema, error) {
|
||||
schema, err := globalMetaCache.GetCollectionSchema(ctx, cit.GetCollectionName())
|
||||
if err != nil {
|
||||
log.Error("failed to get collection schema", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to get collection schema: %s", err)
|
||||
}
|
||||
schemaHelper, err := typeutil.CreateSchemaHelper(schema)
|
||||
if err != nil {
|
||||
log.Error("failed to parse collection schema", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to parse collection schema: %s", err)
|
||||
}
|
||||
field, err := schemaHelper.GetFieldFromName(cit.GetFieldName())
|
||||
if err != nil {
|
||||
log.Error("create index on non-exist field", zap.Error(err))
|
||||
return nil, fmt.Errorf("cannot create index on non-exist field: %s", cit.GetFieldName())
|
||||
}
|
||||
return field, nil
|
||||
}
|
||||
|
||||
func fillDimension(field *schemapb.FieldSchema, indexParams map[string]string) error {
|
||||
vecDataTypes := []schemapb.DataType{
|
||||
schemapb.DataType_FloatVector,
|
||||
schemapb.DataType_BinaryVector,
|
||||
}
|
||||
if !funcutil.SliceContain(vecDataTypes, field.GetDataType()) {
|
||||
return nil
|
||||
}
|
||||
params := make([]*commonpb.KeyValuePair, 0, len(field.GetTypeParams())+len(field.GetIndexParams()))
|
||||
params = append(params, field.GetTypeParams()...)
|
||||
params = append(params, field.GetIndexParams()...)
|
||||
dimensionInSchema, err := funcutil.GetAttrByKeyFromRepeatedKV("dim", params)
|
||||
if err != nil {
|
||||
return fmt.Errorf("dimension not found in schema")
|
||||
}
|
||||
dimension, exist := indexParams["dim"]
|
||||
if exist {
|
||||
if dimensionInSchema != dimension {
|
||||
return fmt.Errorf("dimension mismatch, dimension in schema: %s, dimension: %s", dimensionInSchema, dimension)
|
||||
}
|
||||
} else {
|
||||
indexParams["dim"] = dimensionInSchema
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func checkTrain(field *schemapb.FieldSchema, indexParams map[string]string) error {
|
||||
indexType := indexParams["index_type"]
|
||||
|
||||
// skip params check of non-vector field.
|
||||
vecDataTypes := []schemapb.DataType{
|
||||
schemapb.DataType_FloatVector,
|
||||
schemapb.DataType_BinaryVector,
|
||||
}
|
||||
if !funcutil.SliceContain(vecDataTypes, field.GetDataType()) {
|
||||
return indexparamcheck.CheckIndexValid(field.GetDataType(), indexType, indexParams)
|
||||
}
|
||||
|
||||
adapter, err := indexparamcheck.GetConfAdapterMgrInstance().GetAdapter(indexType)
|
||||
if err != nil {
|
||||
log.Warn("Failed to get conf adapter", zap.String("index_type", indexType))
|
||||
return fmt.Errorf("invalid index type: %s", indexType)
|
||||
}
|
||||
|
||||
if err := fillDimension(field, indexParams); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ok := adapter.CheckValidDataType(field.GetDataType())
|
||||
if !ok {
|
||||
log.Warn("Field data type don't support the index build type", zap.String("fieldDataType", field.GetDataType().String()), zap.String("indexType", indexType))
|
||||
return fmt.Errorf("field data type %s don't support the index build type %s", field.GetDataType().String(), indexType)
|
||||
}
|
||||
|
||||
ok = adapter.CheckTrain(indexParams)
|
||||
if !ok {
|
||||
log.Warn("Create index with invalid params", zap.Any("index_params", indexParams))
|
||||
return fmt.Errorf("invalid index params: %v", indexParams)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) PreExecute(ctx context.Context) error {
|
||||
cit.Base.MsgType = commonpb.MsgType_CreateIndex
|
||||
cit.Base.SourceID = Params.ProxyCfg.GetNodeID()
|
||||
|
||||
collName := cit.CollectionName
|
||||
|
||||
collID, err := globalMetaCache.GetCollectionID(ctx, collName)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cit.collectionID = collID
|
||||
|
||||
field, err := cit.getIndexedField(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cit.fieldSchema = field
|
||||
|
||||
// check index param, not accurate, only some static rules
|
||||
indexParams, err := parseIndexParams(cit.GetExtraParams())
|
||||
if err != nil {
|
||||
log.Error("failed to parse index params", zap.Error(err))
|
||||
return fmt.Errorf("failed to parse index params: %s", err)
|
||||
}
|
||||
|
||||
return checkTrain(field, indexParams)
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) Execute(ctx context.Context) error {
|
||||
log.Debug("proxy create index", zap.Int64("collID", cit.collectionID), zap.Int64("fieldID", cit.fieldSchema.GetFieldID()),
|
||||
zap.String("indexName", cit.GetIndexName()), zap.Any("typeParams", cit.fieldSchema.GetTypeParams()),
|
||||
zap.Any("indexParams", cit.GetExtraParams()))
|
||||
indexParams := cit.GetExtraParams()
|
||||
if !typeutil.IsVectorType(cit.fieldSchema.DataType) {
|
||||
if cit.fieldSchema.DataType == schemapb.DataType_VarChar {
|
||||
indexParams = []*commonpb.KeyValuePair{{Key: "index_type", Value: DefaultStringIndexType}}
|
||||
} else {
|
||||
indexParams = []*commonpb.KeyValuePair{{Key: "index_type", Value: DefaultIndexType}}
|
||||
}
|
||||
}
|
||||
if cit.IndexName == "" {
|
||||
cit.IndexName = Params.CommonCfg.DefaultIndexName + "_" + strconv.FormatInt(cit.fieldSchema.GetFieldID(), 10)
|
||||
}
|
||||
var err error
|
||||
req := &indexpb.CreateIndexRequest{
|
||||
CollectionID: cit.collectionID,
|
||||
FieldID: cit.fieldSchema.GetFieldID(),
|
||||
IndexName: cit.GetIndexName(),
|
||||
TypeParams: cit.fieldSchema.GetTypeParams(),
|
||||
IndexParams: indexParams,
|
||||
Timestamp: cit.BeginTs(),
|
||||
}
|
||||
cit.result, err = cit.indexCoord.CreateIndex(ctx, req)
|
||||
//cit.result, err = cit.rootCoord.CreateIndex(ctx, cit.CreateIndexRequest)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if cit.result.ErrorCode != commonpb.ErrorCode_Success {
|
||||
return errors.New(cit.result.Reason)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) PostExecute(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
type describeIndexTask struct {
|
||||
Condition
|
||||
*milvuspb.DescribeIndexRequest
|
||||
ctx context.Context
|
||||
indexCoord types.IndexCoord
|
||||
result *milvuspb.DescribeIndexResponse
|
||||
|
||||
collectionID UniqueID
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) TraceCtx() context.Context {
|
||||
return dit.ctx
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) ID() UniqueID {
|
||||
return dit.Base.MsgID
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) SetID(uid UniqueID) {
|
||||
dit.Base.MsgID = uid
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) Name() string {
|
||||
return DescribeIndexTaskName
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) Type() commonpb.MsgType {
|
||||
return dit.Base.MsgType
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) BeginTs() Timestamp {
|
||||
return dit.Base.Timestamp
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) EndTs() Timestamp {
|
||||
return dit.Base.Timestamp
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) SetTs(ts Timestamp) {
|
||||
dit.Base.Timestamp = ts
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) OnEnqueue() error {
|
||||
dit.Base = &commonpb.MsgBase{}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) PreExecute(ctx context.Context) error {
|
||||
dit.Base.MsgType = commonpb.MsgType_DescribeIndex
|
||||
dit.Base.SourceID = Params.ProxyCfg.GetNodeID()
|
||||
|
||||
if err := validateCollectionName(dit.CollectionName); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
collID, _ := globalMetaCache.GetCollectionID(ctx, dit.CollectionName)
|
||||
dit.collectionID = collID
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) Execute(ctx context.Context) error {
|
||||
schema, err := globalMetaCache.GetCollectionSchema(ctx, dit.GetCollectionName())
|
||||
if err != nil {
|
||||
log.Error("failed to get collection schema", zap.Error(err))
|
||||
return fmt.Errorf("failed to get collection schema: %s", err)
|
||||
}
|
||||
schemaHelper, err := typeutil.CreateSchemaHelper(schema)
|
||||
if err != nil {
|
||||
log.Error("failed to parse collection schema", zap.Error(err))
|
||||
return fmt.Errorf("failed to parse collection schema: %s", err)
|
||||
}
|
||||
|
||||
resp, err := dit.indexCoord.DescribeIndex(ctx, &indexpb.DescribeIndexRequest{CollectionID: dit.collectionID, IndexName: dit.IndexName})
|
||||
if err != nil || resp == nil {
|
||||
return err
|
||||
}
|
||||
dit.result = &milvuspb.DescribeIndexResponse{}
|
||||
dit.result.Status = resp.GetStatus()
|
||||
if dit.result.Status.ErrorCode != commonpb.ErrorCode_Success {
|
||||
return errors.New(dit.result.Status.Reason)
|
||||
}
|
||||
for _, indexInfo := range resp.IndexInfos {
|
||||
field, err := schemaHelper.GetFieldFromID(indexInfo.FieldID)
|
||||
if err != nil {
|
||||
log.Error("failed to get collection field", zap.Error(err))
|
||||
return fmt.Errorf("failed to get collection field: %d", indexInfo.FieldID)
|
||||
}
|
||||
|
||||
dit.result.IndexDescriptions = append(dit.result.IndexDescriptions, &milvuspb.IndexDescription{
|
||||
IndexName: indexInfo.GetIndexName(),
|
||||
IndexID: indexInfo.GetIndexID(),
|
||||
FieldName: field.Name,
|
||||
Params: indexInfo.GetIndexParams(),
|
||||
IndexedRows: indexInfo.GetIndexedRows(),
|
||||
TotalRows: indexInfo.GetTotalRows(),
|
||||
State: indexInfo.GetState(),
|
||||
IndexStateFailReason: indexInfo.GetIndexStateFailReason(),
|
||||
})
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) PostExecute(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
type dropIndexTask struct {
|
||||
Condition
|
||||
ctx context.Context
|
||||
*milvuspb.DropIndexRequest
|
||||
indexCoord types.IndexCoord
|
||||
result *commonpb.Status
|
||||
|
||||
collectionID UniqueID
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) TraceCtx() context.Context {
|
||||
return dit.ctx
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) ID() UniqueID {
|
||||
return dit.Base.MsgID
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) SetID(uid UniqueID) {
|
||||
dit.Base.MsgID = uid
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) Name() string {
|
||||
return DropIndexTaskName
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) Type() commonpb.MsgType {
|
||||
return dit.Base.MsgType
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) BeginTs() Timestamp {
|
||||
return dit.Base.Timestamp
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) EndTs() Timestamp {
|
||||
return dit.Base.Timestamp
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) SetTs(ts Timestamp) {
|
||||
dit.Base.Timestamp = ts
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) OnEnqueue() error {
|
||||
dit.Base = &commonpb.MsgBase{}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) PreExecute(ctx context.Context) error {
|
||||
dit.Base.MsgType = commonpb.MsgType_DropIndex
|
||||
dit.Base.SourceID = Params.ProxyCfg.GetNodeID()
|
||||
|
||||
collName, fieldName := dit.CollectionName, dit.FieldName
|
||||
|
||||
if err := validateCollectionName(collName); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := validateFieldName(fieldName); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if dit.IndexName == "" {
|
||||
dit.IndexName = Params.CommonCfg.DefaultIndexName
|
||||
}
|
||||
|
||||
collID, _ := globalMetaCache.GetCollectionID(ctx, dit.CollectionName)
|
||||
dit.collectionID = collID
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) Execute(ctx context.Context) error {
|
||||
var err error
|
||||
dit.result, err = dit.indexCoord.DropIndex(ctx, &indexpb.DropIndexRequest{
|
||||
CollectionID: dit.collectionID,
|
||||
PartitionIDs: nil,
|
||||
IndexName: dit.IndexName,
|
||||
})
|
||||
if dit.result == nil {
|
||||
return errors.New("drop index resp is nil")
|
||||
}
|
||||
if dit.result.ErrorCode != commonpb.ErrorCode_Success {
|
||||
return errors.New(dit.result.Reason)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) PostExecute(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Deprecated: use describeIndexTask instead
|
||||
type getIndexBuildProgressTask struct {
|
||||
Condition
|
||||
*milvuspb.GetIndexBuildProgressRequest
|
||||
ctx context.Context
|
||||
indexCoord types.IndexCoord
|
||||
rootCoord types.RootCoord
|
||||
dataCoord types.DataCoord
|
||||
result *milvuspb.GetIndexBuildProgressResponse
|
||||
|
||||
collectionID UniqueID
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) TraceCtx() context.Context {
|
||||
return gibpt.ctx
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) ID() UniqueID {
|
||||
return gibpt.Base.MsgID
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) SetID(uid UniqueID) {
|
||||
gibpt.Base.MsgID = uid
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) Name() string {
|
||||
return GetIndexBuildProgressTaskName
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) Type() commonpb.MsgType {
|
||||
return gibpt.Base.MsgType
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) BeginTs() Timestamp {
|
||||
return gibpt.Base.Timestamp
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) EndTs() Timestamp {
|
||||
return gibpt.Base.Timestamp
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) SetTs(ts Timestamp) {
|
||||
gibpt.Base.Timestamp = ts
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) OnEnqueue() error {
|
||||
gibpt.Base = &commonpb.MsgBase{}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) PreExecute(ctx context.Context) error {
|
||||
gibpt.Base.MsgType = commonpb.MsgType_GetIndexBuildProgress
|
||||
gibpt.Base.SourceID = Params.ProxyCfg.GetNodeID()
|
||||
|
||||
if err := validateCollectionName(gibpt.CollectionName); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) Execute(ctx context.Context) error {
|
||||
collectionName := gibpt.CollectionName
|
||||
collectionID, err := globalMetaCache.GetCollectionID(ctx, collectionName)
|
||||
if err != nil { // err is not nil if collection not exists
|
||||
return err
|
||||
}
|
||||
gibpt.collectionID = collectionID
|
||||
|
||||
if gibpt.IndexName == "" {
|
||||
gibpt.IndexName = Params.CommonCfg.DefaultIndexName
|
||||
}
|
||||
|
||||
resp, err := gibpt.indexCoord.GetIndexBuildProgress(ctx, &indexpb.GetIndexBuildProgressRequest{
|
||||
CollectionID: collectionID,
|
||||
IndexName: gibpt.IndexName,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
gibpt.result = &milvuspb.GetIndexBuildProgressResponse{
|
||||
Status: resp.Status,
|
||||
TotalRows: resp.GetTotalRows(),
|
||||
IndexedRows: resp.GetIndexedRows(),
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) PostExecute(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Deprecated: use describeIndexTask instead
|
||||
type getIndexStateTask struct {
|
||||
Condition
|
||||
*milvuspb.GetIndexStateRequest
|
||||
ctx context.Context
|
||||
indexCoord types.IndexCoord
|
||||
rootCoord types.RootCoord
|
||||
result *milvuspb.GetIndexStateResponse
|
||||
|
||||
collectionID UniqueID
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) TraceCtx() context.Context {
|
||||
return gist.ctx
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) ID() UniqueID {
|
||||
return gist.Base.MsgID
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) SetID(uid UniqueID) {
|
||||
gist.Base.MsgID = uid
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) Name() string {
|
||||
return GetIndexStateTaskName
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) Type() commonpb.MsgType {
|
||||
return gist.Base.MsgType
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) BeginTs() Timestamp {
|
||||
return gist.Base.Timestamp
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) EndTs() Timestamp {
|
||||
return gist.Base.Timestamp
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) SetTs(ts Timestamp) {
|
||||
gist.Base.Timestamp = ts
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) OnEnqueue() error {
|
||||
gist.Base = &commonpb.MsgBase{}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) PreExecute(ctx context.Context) error {
|
||||
gist.Base.MsgType = commonpb.MsgType_GetIndexState
|
||||
gist.Base.SourceID = Params.ProxyCfg.GetNodeID()
|
||||
|
||||
if err := validateCollectionName(gist.CollectionName); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) Execute(ctx context.Context) error {
|
||||
|
||||
if gist.IndexName == "" {
|
||||
gist.IndexName = Params.CommonCfg.DefaultIndexName
|
||||
}
|
||||
collectionID, err := globalMetaCache.GetCollectionID(ctx, gist.CollectionName)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
state, err := gist.indexCoord.GetIndexState(ctx, &indexpb.GetIndexStateRequest{
|
||||
CollectionID: collectionID,
|
||||
IndexName: gist.IndexName,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
gist.result = &milvuspb.GetIndexStateResponse{
|
||||
Status: &commonpb.Status{
|
||||
ErrorCode: commonpb.ErrorCode_Success,
|
||||
Reason: "",
|
||||
},
|
||||
State: state.GetState(),
|
||||
FailReason: state.GetFailReason(),
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) PostExecute(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
type flushTask struct {
|
||||
Condition
|
||||
*milvuspb.FlushRequest
|
||||
|
||||
691
internal/proxy/task_index.go
Normal file
691
internal/proxy/task_index.go
Normal file
@ -0,0 +1,691 @@
|
||||
// Licensed to the LF AI & Data foundation under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package proxy
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/milvus-io/milvus/internal/proto/indexpb"
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/milvus-io/milvus/internal/common"
|
||||
"github.com/milvus-io/milvus/internal/log"
|
||||
"github.com/milvus-io/milvus/internal/types"
|
||||
|
||||
"github.com/milvus-io/milvus/api/commonpb"
|
||||
"github.com/milvus-io/milvus/api/milvuspb"
|
||||
"github.com/milvus-io/milvus/api/schemapb"
|
||||
"github.com/milvus-io/milvus/internal/util/funcutil"
|
||||
"github.com/milvus-io/milvus/internal/util/indexparamcheck"
|
||||
"github.com/milvus-io/milvus/internal/util/typeutil"
|
||||
)
|
||||
|
||||
const (
|
||||
CreateIndexTaskName = "CreateIndexTask"
|
||||
DescribeIndexTaskName = "DescribeIndexTask"
|
||||
DropIndexTaskName = "DropIndexTask"
|
||||
GetIndexStateTaskName = "GetIndexStateTask"
|
||||
GetIndexBuildProgressTaskName = "GetIndexBuildProgressTask"
|
||||
|
||||
AutoIndexName = "AUTOINDEX"
|
||||
DimKey = common.DimKey
|
||||
)
|
||||
|
||||
type createIndexTask struct {
|
||||
Condition
|
||||
req *milvuspb.CreateIndexRequest
|
||||
ctx context.Context
|
||||
rootCoord types.RootCoord
|
||||
indexCoord types.IndexCoord
|
||||
result *commonpb.Status
|
||||
|
||||
isAutoIndex bool
|
||||
newIndexParams []*commonpb.KeyValuePair
|
||||
|
||||
collectionID UniqueID
|
||||
fieldSchema *schemapb.FieldSchema
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) TraceCtx() context.Context {
|
||||
return cit.ctx
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) ID() UniqueID {
|
||||
return cit.req.GetBase().GetMsgID()
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) SetID(uid UniqueID) {
|
||||
cit.req.GetBase().MsgID = uid
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) Name() string {
|
||||
return CreateIndexTaskName
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) Type() commonpb.MsgType {
|
||||
return cit.req.GetBase().GetMsgType()
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) BeginTs() Timestamp {
|
||||
return cit.req.GetBase().GetTimestamp()
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) EndTs() Timestamp {
|
||||
return cit.req.GetBase().GetTimestamp()
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) SetTs(ts Timestamp) {
|
||||
cit.req.Base.Timestamp = ts
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) OnEnqueue() error {
|
||||
cit.req.Base = &commonpb.MsgBase{}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) parseIndexParams() error {
|
||||
isVecIndex := typeutil.IsVectorType(cit.fieldSchema.DataType)
|
||||
indexParamsMap := make(map[string]string)
|
||||
if !isVecIndex {
|
||||
if cit.fieldSchema.DataType == schemapb.DataType_VarChar {
|
||||
indexParamsMap[common.IndexTypeKey] = DefaultStringIndexType
|
||||
} else {
|
||||
indexParamsMap[common.IndexTypeKey] = DefaultIndexType
|
||||
}
|
||||
}
|
||||
|
||||
for _, kv := range cit.req.GetExtraParams() {
|
||||
if kv.Key == common.IndexParamsKey {
|
||||
params, err := funcutil.ParseIndexParamsMap(kv.Value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for k, v := range params {
|
||||
indexParamsMap[k] = v
|
||||
}
|
||||
} else {
|
||||
indexParamsMap[kv.Key] = kv.Value
|
||||
}
|
||||
}
|
||||
|
||||
if isVecIndex {
|
||||
specifyIndexType, exist := indexParamsMap[common.IndexTypeKey]
|
||||
if Params.AutoIndexConfig.Enable {
|
||||
if exist {
|
||||
if specifyIndexType != AutoIndexName {
|
||||
return fmt.Errorf("IndexType should be %s", AutoIndexName)
|
||||
}
|
||||
}
|
||||
log.Debug("create index trigger AutoIndex",
|
||||
zap.String("type", Params.AutoIndexConfig.AutoIndexTypeName))
|
||||
// override params
|
||||
for k, v := range Params.AutoIndexConfig.IndexParams {
|
||||
indexParamsMap[k] = v
|
||||
}
|
||||
} else {
|
||||
if !exist {
|
||||
return fmt.Errorf("IndexType not specified")
|
||||
}
|
||||
}
|
||||
err := checkTrain(cit.fieldSchema, indexParamsMap)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
typeParams := cit.fieldSchema.GetTypeParams()
|
||||
typeParamsMap := make(map[string]interface{})
|
||||
for _, pair := range typeParams {
|
||||
typeParamsMap[pair.Key] = struct{}{}
|
||||
}
|
||||
|
||||
for k, v := range indexParamsMap {
|
||||
// Currently, type_params and index_params are required not to share the same keys.
|
||||
_, ok := typeParamsMap[k]
|
||||
if ok {
|
||||
continue
|
||||
}
|
||||
cit.newIndexParams = append(cit.newIndexParams, &commonpb.KeyValuePair{Key: k, Value: v})
|
||||
}
|
||||
|
||||
return nil
|
||||
}
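To make the vector-field branch above easier to follow in isolation, here is a minimal, self-contained sketch of the same decision; the function name and the tuned parameter values are illustrative assumptions, not the actual AutoIndexConfig wiring:

package main

import (
	"errors"
	"fmt"
)

// resolveVectorIndexParams mirrors parseIndexParams for a vector field: with
// AutoIndex enabled, index_type must be omitted or equal "AUTOINDEX", and the
// configured parameters override whatever the caller sent; with AutoIndex
// disabled, index_type must be present.
func resolveVectorIndexParams(userParams map[string]string, autoIndexEnable bool, autoIndexParams map[string]string) (map[string]string, error) {
	out := make(map[string]string, len(userParams))
	for k, v := range userParams {
		out[k] = v
	}
	indexType, specified := out["index_type"]
	if autoIndexEnable {
		if specified && indexType != "AUTOINDEX" {
			return nil, fmt.Errorf("IndexType should be %s", "AUTOINDEX")
		}
		for k, v := range autoIndexParams { // override with the tuned params
			out[k] = v
		}
		return out, nil
	}
	if !specified {
		return nil, errors.New("IndexType not specified")
	}
	return out, nil
}

func main() {
	// assumed tuned params; the real values come from the autoIndex config, not shown here
	tuned := map[string]string{"index_type": "HNSW", "M": "18", "efConstruction": "240"}
	out, err := resolveVectorIndexParams(map[string]string{"index_type": "AUTOINDEX", "metric_type": "L2"}, true, tuned)
	fmt.Println(out, err)
}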
|
||||
|
||||
func (cit *createIndexTask) getIndexedField(ctx context.Context) (*schemapb.FieldSchema, error) {
|
||||
schema, err := globalMetaCache.GetCollectionSchema(ctx, cit.req.GetCollectionName())
|
||||
if err != nil {
|
||||
log.Error("failed to get collection schema", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to get collection schema: %s", err)
|
||||
}
|
||||
schemaHelper, err := typeutil.CreateSchemaHelper(schema)
|
||||
if err != nil {
|
||||
log.Error("failed to parse collection schema", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to parse collection schema: %s", err)
|
||||
}
|
||||
field, err := schemaHelper.GetFieldFromName(cit.req.GetFieldName())
|
||||
if err != nil {
|
||||
log.Error("create index on non-exist field", zap.Error(err))
|
||||
return nil, fmt.Errorf("cannot create index on non-exist field: %s", cit.req.GetFieldName())
|
||||
}
|
||||
return field, nil
|
||||
}
|
||||
|
||||
func fillDimension(field *schemapb.FieldSchema, indexParams map[string]string) error {
|
||||
vecDataTypes := []schemapb.DataType{
|
||||
schemapb.DataType_FloatVector,
|
||||
schemapb.DataType_BinaryVector,
|
||||
}
|
||||
if !funcutil.SliceContain(vecDataTypes, field.GetDataType()) {
|
||||
return nil
|
||||
}
|
||||
params := make([]*commonpb.KeyValuePair, 0, len(field.GetTypeParams())+len(field.GetIndexParams()))
|
||||
params = append(params, field.GetTypeParams()...)
|
||||
params = append(params, field.GetIndexParams()...)
|
||||
dimensionInSchema, err := funcutil.GetAttrByKeyFromRepeatedKV(DimKey, params)
|
||||
if err != nil {
|
||||
return fmt.Errorf("dimension not found in schema")
|
||||
}
|
||||
dimension, exist := indexParams[DimKey]
|
||||
if exist {
|
||||
if dimensionInSchema != dimension {
|
||||
return fmt.Errorf("dimension mismatch, dimension in schema: %s, dimension: %s", dimensionInSchema, dimension)
|
||||
}
|
||||
} else {
|
||||
indexParams[DimKey] = dimensionInSchema
|
||||
}
|
||||
return nil
|
||||
}
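A simplified sketch of the dimension rule above, working on plain maps instead of the schema and KeyValuePair types (the helper name is illustrative):

package main

import "fmt"

// fillDim copies the schema dimension into the index params, or rejects a mismatch,
// mirroring fillDimension above.
func fillDim(schemaDim string, indexParams map[string]string) error {
	if dim, ok := indexParams["dim"]; ok {
		if dim != schemaDim {
			return fmt.Errorf("dimension mismatch, dimension in schema: %s, dimension: %s", schemaDim, dim)
		}
		return nil
	}
	indexParams["dim"] = schemaDim
	return nil
}

func main() {
	params := map[string]string{"index_type": "HNSW"}
	fmt.Println(fillDim("128", params), params) // <nil> map[dim:128 index_type:HNSW]
	fmt.Println(fillDim("64", params))          // dimension mismatch error
}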
|
||||
|
||||
func checkTrain(field *schemapb.FieldSchema, indexParams map[string]string) error {
|
||||
indexType := indexParams[common.IndexTypeKey]
|
||||
// skip params check of non-vector field.
|
||||
vecDataTypes := []schemapb.DataType{
|
||||
schemapb.DataType_FloatVector,
|
||||
schemapb.DataType_BinaryVector,
|
||||
}
|
||||
if !funcutil.SliceContain(vecDataTypes, field.GetDataType()) {
|
||||
return indexparamcheck.CheckIndexValid(field.GetDataType(), indexType, indexParams)
|
||||
}
|
||||
|
||||
adapter, err := indexparamcheck.GetConfAdapterMgrInstance().GetAdapter(indexType)
|
||||
if err != nil {
|
||||
log.Warn("Failed to get conf adapter", zap.String(common.IndexTypeKey, indexType))
|
||||
return fmt.Errorf("invalid index type: %s", indexType)
|
||||
}
|
||||
|
||||
if err := fillDimension(field, indexParams); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ok := adapter.CheckValidDataType(field.GetDataType())
|
||||
if !ok {
|
||||
log.Warn("Field data type don't support the index build type", zap.String("fieldDataType", field.GetDataType().String()), zap.String("indexType", indexType))
|
||||
return fmt.Errorf("field data type %s don't support the index build type %s", field.GetDataType().String(), indexType)
|
||||
}
|
||||
|
||||
ok = adapter.CheckTrain(indexParams)
|
||||
if !ok {
|
||||
log.Warn("Create index with invalid params", zap.Any("index_params", indexParams))
|
||||
return fmt.Errorf("invalid index params: %v", indexParams)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) PreExecute(ctx context.Context) error {
|
||||
cit.req.Base.MsgType = commonpb.MsgType_CreateIndex
|
||||
cit.req.Base.SourceID = Params.ProxyCfg.GetNodeID()
|
||||
|
||||
collName := cit.req.GetCollectionName()
|
||||
|
||||
collID, err := globalMetaCache.GetCollectionID(ctx, collName)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cit.collectionID = collID
|
||||
|
||||
field, err := cit.getIndexedField(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cit.fieldSchema = field
|
||||
// check index params; this check is not exhaustive, only some static rules apply
|
||||
return cit.parseIndexParams()
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) Execute(ctx context.Context) error {
|
||||
log.Debug("proxy create index", zap.Int64("collID", cit.collectionID), zap.Int64("fieldID", cit.fieldSchema.GetFieldID()),
|
||||
zap.String("indexName", cit.req.GetIndexName()), zap.Any("typeParams", cit.fieldSchema.GetTypeParams()),
|
||||
zap.Any("indexParams", cit.req.GetExtraParams()))
|
||||
|
||||
if cit.req.GetIndexName() == "" {
|
||||
cit.req.IndexName = Params.CommonCfg.DefaultIndexName
|
||||
}
|
||||
var err error
|
||||
req := &indexpb.CreateIndexRequest{
|
||||
CollectionID: cit.collectionID,
|
||||
FieldID: cit.fieldSchema.GetFieldID(),
|
||||
IndexName: cit.req.GetIndexName(),
|
||||
TypeParams: cit.fieldSchema.GetTypeParams(),
|
||||
IndexParams: cit.newIndexParams,
|
||||
IsAutoIndex: cit.isAutoIndex,
|
||||
UserIndexParams: cit.req.GetExtraParams(),
|
||||
Timestamp: cit.BeginTs(),
|
||||
}
|
||||
cit.result, err = cit.indexCoord.CreateIndex(ctx, req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if cit.result.ErrorCode != commonpb.ErrorCode_Success {
|
||||
return errors.New(cit.result.Reason)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (cit *createIndexTask) PostExecute(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
type describeIndexTask struct {
|
||||
Condition
|
||||
*milvuspb.DescribeIndexRequest
|
||||
ctx context.Context
|
||||
indexCoord types.IndexCoord
|
||||
result *milvuspb.DescribeIndexResponse
|
||||
|
||||
collectionID UniqueID
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) TraceCtx() context.Context {
|
||||
return dit.ctx
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) ID() UniqueID {
|
||||
return dit.Base.MsgID
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) SetID(uid UniqueID) {
|
||||
dit.Base.MsgID = uid
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) Name() string {
|
||||
return DescribeIndexTaskName
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) Type() commonpb.MsgType {
|
||||
return dit.Base.MsgType
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) BeginTs() Timestamp {
|
||||
return dit.Base.Timestamp
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) EndTs() Timestamp {
|
||||
return dit.Base.Timestamp
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) SetTs(ts Timestamp) {
|
||||
dit.Base.Timestamp = ts
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) OnEnqueue() error {
|
||||
dit.Base = &commonpb.MsgBase{}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) PreExecute(ctx context.Context) error {
|
||||
dit.Base.MsgType = commonpb.MsgType_DescribeIndex
|
||||
dit.Base.SourceID = Params.ProxyCfg.GetNodeID()
|
||||
|
||||
if err := validateCollectionName(dit.CollectionName); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
collID, _ := globalMetaCache.GetCollectionID(ctx, dit.CollectionName)
|
||||
dit.collectionID = collID
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dit *describeIndexTask) Execute(ctx context.Context) error {
|
||||
schema, err := globalMetaCache.GetCollectionSchema(ctx, dit.GetCollectionName())
|
||||
if err != nil {
|
||||
log.Error("failed to get collection schema", zap.Error(err))
|
||||
return fmt.Errorf("failed to get collection schema: %s", err)
|
||||
}
|
||||
schemaHelper, err := typeutil.CreateSchemaHelper(schema)
|
||||
if err != nil {
|
||||
log.Error("failed to parse collection schema", zap.Error(err))
|
||||
return fmt.Errorf("failed to parse collection schema: %s", err)
|
||||
}
|
||||
|
||||
resp, err := dit.indexCoord.DescribeIndex(ctx, &indexpb.DescribeIndexRequest{CollectionID: dit.collectionID, IndexName: dit.IndexName})
|
||||
if err != nil || resp == nil {
|
||||
return err
|
||||
}
|
||||
dit.result = &milvuspb.DescribeIndexResponse{}
|
||||
dit.result.Status = resp.GetStatus()
|
||||
if dit.result.Status.ErrorCode != commonpb.ErrorCode_Success {
|
||||
return errors.New(dit.result.Status.Reason)
|
||||
}
|
||||
for _, indexInfo := range resp.IndexInfos {
|
||||
field, err := schemaHelper.GetFieldFromID(indexInfo.FieldID)
|
||||
if err != nil {
|
||||
log.Error("failed to get collection field", zap.Error(err))
|
||||
return fmt.Errorf("failed to get collection field: %d", indexInfo.FieldID)
|
||||
}
|
||||
params := indexInfo.GetUserIndexParams()
|
||||
if params == nil {
|
||||
params = indexInfo.GetIndexParams()
|
||||
}
|
||||
desc := &milvuspb.IndexDescription{
|
||||
IndexName: indexInfo.GetIndexName(),
|
||||
IndexID: indexInfo.GetIndexID(),
|
||||
FieldName: field.Name,
|
||||
Params: params,
|
||||
IndexedRows: indexInfo.GetIndexedRows(),
|
||||
TotalRows: indexInfo.GetTotalRows(),
|
||||
State: indexInfo.GetState(),
|
||||
IndexStateFailReason: indexInfo.GetIndexStateFailReason(),
|
||||
}
|
||||
dit.result.IndexDescriptions = append(dit.result.IndexDescriptions, desc)
|
||||
}
|
||||
return err
|
||||
}
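The UserIndexParams fallback above means DescribeIndex reports the params the user originally supplied (for AutoIndex typically just the AUTOINDEX type) and only falls back to the concrete index params when none were recorded. A small sketch of that fallback with a local stand-in type, assuming illustrative values:

package main

import "fmt"

type kv struct{ Key, Value string }

// displayParams prefers the user-supplied params; otherwise it returns the
// concrete index params, mirroring the branch above.
func displayParams(userParams, indexParams []kv) []kv {
	if userParams != nil {
		return userParams
	}
	return indexParams
}

func main() {
	concrete := []kv{{"index_type", "HNSW"}, {"M", "18"}} // assumed tuned params
	fmt.Println(displayParams(nil, concrete))
	fmt.Println(displayParams([]kv{{"index_type", "AUTOINDEX"}}, concrete))
}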
|
||||
|
||||
func (dit *describeIndexTask) PostExecute(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
type dropIndexTask struct {
|
||||
Condition
|
||||
ctx context.Context
|
||||
*milvuspb.DropIndexRequest
|
||||
indexCoord types.IndexCoord
|
||||
result *commonpb.Status
|
||||
|
||||
collectionID UniqueID
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) TraceCtx() context.Context {
|
||||
return dit.ctx
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) ID() UniqueID {
|
||||
return dit.Base.MsgID
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) SetID(uid UniqueID) {
|
||||
dit.Base.MsgID = uid
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) Name() string {
|
||||
return DropIndexTaskName
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) Type() commonpb.MsgType {
|
||||
return dit.Base.MsgType
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) BeginTs() Timestamp {
|
||||
return dit.Base.Timestamp
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) EndTs() Timestamp {
|
||||
return dit.Base.Timestamp
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) SetTs(ts Timestamp) {
|
||||
dit.Base.Timestamp = ts
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) OnEnqueue() error {
|
||||
dit.Base = &commonpb.MsgBase{}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) PreExecute(ctx context.Context) error {
|
||||
dit.Base.MsgType = commonpb.MsgType_DropIndex
|
||||
dit.Base.SourceID = Params.ProxyCfg.GetNodeID()
|
||||
|
||||
collName, fieldName := dit.CollectionName, dit.FieldName
|
||||
|
||||
if err := validateCollectionName(collName); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := validateFieldName(fieldName); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if dit.IndexName == "" {
|
||||
dit.IndexName = Params.CommonCfg.DefaultIndexName
|
||||
}
|
||||
|
||||
collID, _ := globalMetaCache.GetCollectionID(ctx, dit.CollectionName)
|
||||
dit.collectionID = collID
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) Execute(ctx context.Context) error {
|
||||
var err error
|
||||
dit.result, err = dit.indexCoord.DropIndex(ctx, &indexpb.DropIndexRequest{
|
||||
CollectionID: dit.collectionID,
|
||||
PartitionIDs: nil,
|
||||
IndexName: dit.IndexName,
|
||||
})
|
||||
if dit.result == nil {
|
||||
return errors.New("drop index resp is nil")
|
||||
}
|
||||
if dit.result.ErrorCode != commonpb.ErrorCode_Success {
|
||||
return errors.New(dit.result.Reason)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (dit *dropIndexTask) PostExecute(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Deprecated: use describeIndexTask instead
|
||||
type getIndexBuildProgressTask struct {
|
||||
Condition
|
||||
*milvuspb.GetIndexBuildProgressRequest
|
||||
ctx context.Context
|
||||
indexCoord types.IndexCoord
|
||||
rootCoord types.RootCoord
|
||||
dataCoord types.DataCoord
|
||||
result *milvuspb.GetIndexBuildProgressResponse
|
||||
|
||||
collectionID UniqueID
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) TraceCtx() context.Context {
|
||||
return gibpt.ctx
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) ID() UniqueID {
|
||||
return gibpt.Base.MsgID
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) SetID(uid UniqueID) {
|
||||
gibpt.Base.MsgID = uid
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) Name() string {
|
||||
return GetIndexBuildProgressTaskName
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) Type() commonpb.MsgType {
|
||||
return gibpt.Base.MsgType
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) BeginTs() Timestamp {
|
||||
return gibpt.Base.Timestamp
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) EndTs() Timestamp {
|
||||
return gibpt.Base.Timestamp
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) SetTs(ts Timestamp) {
|
||||
gibpt.Base.Timestamp = ts
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) OnEnqueue() error {
|
||||
gibpt.Base = &commonpb.MsgBase{}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) PreExecute(ctx context.Context) error {
|
||||
gibpt.Base.MsgType = commonpb.MsgType_GetIndexBuildProgress
|
||||
gibpt.Base.SourceID = Params.ProxyCfg.GetNodeID()
|
||||
|
||||
if err := validateCollectionName(gibpt.CollectionName); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) Execute(ctx context.Context) error {
|
||||
collectionName := gibpt.CollectionName
|
||||
collectionID, err := globalMetaCache.GetCollectionID(ctx, collectionName)
|
||||
if err != nil { // err is not nil if collection not exists
|
||||
return err
|
||||
}
|
||||
gibpt.collectionID = collectionID
|
||||
|
||||
if gibpt.IndexName == "" {
|
||||
gibpt.IndexName = Params.CommonCfg.DefaultIndexName
|
||||
}
|
||||
|
||||
resp, err := gibpt.indexCoord.GetIndexBuildProgress(ctx, &indexpb.GetIndexBuildProgressRequest{
|
||||
CollectionID: collectionID,
|
||||
IndexName: gibpt.IndexName,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
gibpt.result = &milvuspb.GetIndexBuildProgressResponse{
|
||||
Status: resp.Status,
|
||||
TotalRows: resp.GetTotalRows(),
|
||||
IndexedRows: resp.GetIndexedRows(),
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (gibpt *getIndexBuildProgressTask) PostExecute(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Deprecated: use describeIndexTask instead
|
||||
type getIndexStateTask struct {
|
||||
Condition
|
||||
*milvuspb.GetIndexStateRequest
|
||||
ctx context.Context
|
||||
indexCoord types.IndexCoord
|
||||
rootCoord types.RootCoord
|
||||
result *milvuspb.GetIndexStateResponse
|
||||
|
||||
collectionID UniqueID
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) TraceCtx() context.Context {
|
||||
return gist.ctx
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) ID() UniqueID {
|
||||
return gist.Base.MsgID
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) SetID(uid UniqueID) {
|
||||
gist.Base.MsgID = uid
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) Name() string {
|
||||
return GetIndexStateTaskName
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) Type() commonpb.MsgType {
|
||||
return gist.Base.MsgType
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) BeginTs() Timestamp {
|
||||
return gist.Base.Timestamp
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) EndTs() Timestamp {
|
||||
return gist.Base.Timestamp
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) SetTs(ts Timestamp) {
|
||||
gist.Base.Timestamp = ts
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) OnEnqueue() error {
|
||||
gist.Base = &commonpb.MsgBase{}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) PreExecute(ctx context.Context) error {
|
||||
gist.Base.MsgType = commonpb.MsgType_GetIndexState
|
||||
gist.Base.SourceID = Params.ProxyCfg.GetNodeID()
|
||||
|
||||
if err := validateCollectionName(gist.CollectionName); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) Execute(ctx context.Context) error {
|
||||
|
||||
if gist.IndexName == "" {
|
||||
gist.IndexName = Params.CommonCfg.DefaultIndexName
|
||||
}
|
||||
collectionID, err := globalMetaCache.GetCollectionID(ctx, gist.CollectionName)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
state, err := gist.indexCoord.GetIndexState(ctx, &indexpb.GetIndexStateRequest{
|
||||
CollectionID: collectionID,
|
||||
IndexName: gist.IndexName,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
gist.result = &milvuspb.GetIndexStateResponse{
|
||||
Status: &commonpb.Status{
|
||||
ErrorCode: commonpb.ErrorCode_Success,
|
||||
Reason: "",
|
||||
},
|
||||
State: state.GetState(),
|
||||
FailReason: state.GetFailReason(),
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (gist *getIndexStateTask) PostExecute(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
@ -34,6 +34,11 @@ const (
|
||||
WithoutCache = false
|
||||
)
|
||||
|
||||
const (
|
||||
RetrieveTaskName = "RetrieveTask"
|
||||
QueryTaskName = "QueryTask"
|
||||
)
|
||||
|
||||
type queryTask struct {
|
||||
Condition
|
||||
*internalpb.RetrieveRequest
|
||||
|
||||
@ -2,11 +2,13 @@ package proxy
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strconv"
|
||||
|
||||
"github.com/milvus-io/milvus/internal/common"
|
||||
"github.com/milvus-io/milvus/internal/parser/planparserv2"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
@ -32,6 +34,11 @@ import (
|
||||
"github.com/milvus-io/milvus/internal/proto/querypb"
|
||||
)
|
||||
|
||||
const (
|
||||
SearchTaskName = "SearchTask"
|
||||
SearchLevelKey = "level"
|
||||
)
|
||||
|
||||
type searchTask struct {
|
||||
Condition
|
||||
*internalpb.SearchRequest
|
||||
@ -89,8 +96,63 @@ func getPartitionIDs(ctx context.Context, collectionName string, partitionNames
|
||||
return partitionIDs, nil
|
||||
}
|
||||
|
||||
// parseQueryInfo returns QueryInfo and offset
|
||||
func parseQueryInfo(searchParamsPair []*commonpb.KeyValuePair) (*planpb.QueryInfo, int64, error) {
|
||||
func parseSearchParams(searchParamsPair []*commonpb.KeyValuePair) (string, error) {
|
||||
searchParamStr, err := funcutil.GetAttrByKeyFromRepeatedKV(SearchParamsKey, searchParamsPair)
|
||||
if Params.AutoIndexConfig.Enable {
|
||||
searchParamMap := make(map[string]interface{})
|
||||
var level int
|
||||
if err == nil { // if specified params, we try to parse params
|
||||
err = json.Unmarshal([]byte(searchParamStr), &searchParamMap)
|
||||
if err == nil { // if unmarshal success, we try to parse level
|
||||
if searchParamMap == nil { // if searchParamStr equals "null", searchParamMap becomes nil after unmarshal
|
||||
searchParamMap = make(map[string]interface{})
|
||||
}
|
||||
levelValue, ok := searchParamMap[SearchLevelKey]
|
||||
if !ok { // if level is not specified, set to default 1
|
||||
level = 1
|
||||
} else {
|
||||
switch lValue := levelValue.(type) {
|
||||
case float64: // for numeric values, json unmarshal will interpret it as float64
|
||||
level = int(lValue)
|
||||
case string:
|
||||
level, err = strconv.Atoi(lValue)
|
||||
default:
|
||||
err = fmt.Errorf("wrong level in search params")
|
||||
}
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("search params in wrong format:%w", err)
|
||||
}
|
||||
} else {
|
||||
level = 1
|
||||
}
|
||||
calculator := Params.AutoIndexConfig.GetSearchParamStrCalculator(level)
|
||||
if calculator == nil {
|
||||
return "", fmt.Errorf("search params calculator not found for level:%d", level)
|
||||
}
|
||||
newSearchParamMap, err2 := calculator.Calculate(searchParamsPair)
|
||||
if err2 != nil {
|
||||
return "", errors.New("search params calculate failed")
|
||||
}
|
||||
for k, v := range newSearchParamMap {
|
||||
searchParamMap[k] = v
|
||||
}
|
||||
searchParamValue, err2 := json.Marshal(searchParamMap)
|
||||
if err2 != nil {
|
||||
return "", err2
|
||||
}
|
||||
searchParamStr = string(searchParamValue)
|
||||
} else {
|
||||
if err != nil {
|
||||
return "", errors.New(SearchParamsKey + " not found in search_params")
|
||||
}
|
||||
}
|
||||
return searchParamStr, nil
|
||||
}
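A minimal sketch of the input this function expects when AutoIndex is enabled, assuming it runs inside the proxy package with Params.AutoIndexConfig initialized; the field name, metric, and level value are illustrative only.

// Client-side search params: only a quality "level" is supplied; the concrete
// index-specific params (e.g. ef for HNSW) are filled in by the level's Calculator.
searchParams := []*commonpb.KeyValuePair{
        {Key: AnnsFieldKey, Value: "float_vector"}, // illustrative field name
        {Key: TopKKey, Value: "100"},
        {Key: common.MetricTypeKey, Value: "L2"},
        {Key: RoundDecimalKey, Value: "-1"},
        {Key: SearchParamsKey, Value: `{"level": 1}`},
}
paramStr, err := parseSearchParams(searchParams)
// With AutoIndexConfig.Enable == true and no error, paramStr is a JSON object that
// keeps "level" and adds the computed params, e.g. {"ef": ..., "level": 1}.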
|
||||
|
||||
// parseSearchInfo returns QueryInfo and offset
|
||||
func parseSearchInfo(searchParamsPair []*commonpb.KeyValuePair) (*planpb.QueryInfo, int64, error) {
|
||||
topKStr, err := funcutil.GetAttrByKeyFromRepeatedKV(TopKKey, searchParamsPair)
|
||||
if err != nil {
|
||||
return nil, 0, errors.New(TopKKey + " not found in search_params")
|
||||
@ -123,14 +185,9 @@ func parseQueryInfo(searchParamsPair []*commonpb.KeyValuePair) (*planpb.QueryInf
|
||||
return nil, 0, fmt.Errorf("%s+%s [%d] is invalid, %w", OffsetKey, TopKKey, queryTopK, err)
|
||||
}
|
||||
|
||||
metricType, err := funcutil.GetAttrByKeyFromRepeatedKV(MetricTypeKey, searchParamsPair)
|
||||
metricType, err := funcutil.GetAttrByKeyFromRepeatedKV(common.MetricTypeKey, searchParamsPair)
|
||||
if err != nil {
|
||||
return nil, 0, errors.New(MetricTypeKey + " not found in search_params")
|
||||
}
|
||||
|
||||
searchParams, err := funcutil.GetAttrByKeyFromRepeatedKV(SearchParamsKey, searchParamsPair)
|
||||
if err != nil {
|
||||
return nil, 0, errors.New(SearchParamsKey + " not found in search_params")
|
||||
return nil, 0, errors.New(common.MetricTypeKey + " not found in search_params")
|
||||
}
|
||||
|
||||
roundDecimalStr, err := funcutil.GetAttrByKeyFromRepeatedKV(RoundDecimalKey, searchParamsPair)
|
||||
@ -146,11 +203,14 @@ func parseQueryInfo(searchParamsPair []*commonpb.KeyValuePair) (*planpb.QueryInf
|
||||
if roundDecimal != -1 && (roundDecimal > 6 || roundDecimal < 0) {
|
||||
return nil, 0, fmt.Errorf("%s [%s] is invalid, should be -1 or an integer in range [0, 6]", RoundDecimalKey, roundDecimalStr)
|
||||
}
|
||||
|
||||
searchParamStr, err := parseSearchParams(searchParamsPair)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
return &planpb.QueryInfo{
|
||||
Topk: queryTopK,
|
||||
MetricType: metricType,
|
||||
SearchParams: searchParams,
|
||||
SearchParams: searchParamStr,
|
||||
RoundDecimal: roundDecimal,
|
||||
}, offset, nil
|
||||
}
|
||||
@ -245,7 +305,7 @@ func (t *searchTask) PreExecute(ctx context.Context) error {
|
||||
return errors.New(AnnsFieldKey + " not found in search_params")
|
||||
}
|
||||
|
||||
queryInfo, offset, err := parseQueryInfo(t.request.GetSearchParams())
|
||||
queryInfo, offset, err := parseSearchInfo(t.request.GetSearchParams())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@ -2,6 +2,7 @@ package proxy
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strconv"
|
||||
@ -9,6 +10,8 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
"github.com/milvus-io/milvus/internal/util/autoindex"
|
||||
"github.com/milvus-io/milvus/internal/util/indexparamcheck"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
@ -95,7 +98,7 @@ func getValidSearchParams() []*commonpb.KeyValuePair {
|
||||
Value: "10",
|
||||
},
|
||||
{
|
||||
Key: MetricTypeKey,
|
||||
Key: common.MetricTypeKey,
|
||||
Value: distance.L2,
|
||||
},
|
||||
{
|
||||
@ -1756,7 +1759,7 @@ func TestSearchTask_ErrExecute(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestTaskSearch_parseQueryInfo(t *testing.T) {
|
||||
t.Run("parseQueryInfo no error", func(t *testing.T) {
|
||||
t.Run("parseSearchInfo no error", func(t *testing.T) {
|
||||
var targetOffset int64 = 200
|
||||
|
||||
sp := getValidSearchParams()
|
||||
@ -1765,13 +1768,13 @@ func TestTaskSearch_parseQueryInfo(t *testing.T) {
|
||||
Value: strconv.FormatInt(targetOffset, 10),
|
||||
})
|
||||
|
||||
info, offset, err := parseQueryInfo(sp)
|
||||
info, offset, err := parseSearchInfo(sp)
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, info)
|
||||
assert.Equal(t, targetOffset, offset)
|
||||
})
|
||||
|
||||
t.Run("parseQueryInfo error", func(t *testing.T) {
|
||||
t.Run("parseSearchInfo error", func(t *testing.T) {
|
||||
spNoTopk := []*commonpb.KeyValuePair{{
|
||||
Key: AnnsFieldKey,
|
||||
Value: testFloatVecField}}
|
||||
@ -1797,7 +1800,7 @@ func TestTaskSearch_parseQueryInfo(t *testing.T) {
|
||||
})
|
||||
|
||||
spNoSearchParams := append(spNoMetricType, &commonpb.KeyValuePair{
|
||||
Key: MetricTypeKey,
|
||||
Key: common.MetricTypeKey,
|
||||
Value: distance.L2,
|
||||
})
|
||||
|
||||
@ -1851,7 +1854,7 @@ func TestTaskSearch_parseQueryInfo(t *testing.T) {
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.description, func(t *testing.T) {
|
||||
info, offset, err := parseQueryInfo(test.invalidParams)
|
||||
info, offset, err := parseSearchInfo(test.invalidParams)
|
||||
assert.Error(t, err)
|
||||
assert.Nil(t, info)
|
||||
assert.Zero(t, offset)
|
||||
@ -1862,6 +1865,203 @@ func TestTaskSearch_parseQueryInfo(t *testing.T) {
|
||||
})
|
||||
}
|
||||
|
||||
func TestTaskSearch_parseSearchParams_AutoIndexEnable(t *testing.T) {
|
||||
oldEnable := Params.AutoIndexConfig.Enable
|
||||
oldIndexType := Params.AutoIndexConfig.IndexType
|
||||
oldIndexParams := Params.AutoIndexConfig.IndexParams
|
||||
oldSearchParamYamStr := Params.AutoIndexConfig.SearchParamsYamlStr
|
||||
oldParser := Params.AutoIndexConfig.Parser
|
||||
//parseSearchParams
|
||||
Params.AutoIndexConfig.Enable = true
|
||||
Params.AutoIndexConfig.IndexType = indexparamcheck.IndexHNSW
|
||||
Params.AutoIndexConfig.IndexParams = make(map[string]string)
|
||||
|
||||
buildParams := map[string]interface{}{
|
||||
common.MetricTypeKey: indexparamcheck.L2,
|
||||
common.IndexTypeKey: indexparamcheck.IndexHNSW,
|
||||
"M": 8,
|
||||
"efConstruction": 50,
|
||||
}
|
||||
buildParamsJSONValue, err := json.Marshal(buildParams)
|
||||
assert.NoError(t, err)
|
||||
Params.AutoIndexConfig.IndexParams, err = funcutil.ParseIndexParamsMap(string(buildParamsJSONValue))
|
||||
assert.NoError(t, err)
|
||||
|
||||
jsonStr := `
|
||||
{
|
||||
"1": {
|
||||
"bp": [10, 90],
|
||||
"functions": [
|
||||
"__ef = __topk * 2.2 + 31",
|
||||
"__ef = __topk * 1.58 + 39",
|
||||
"__ef = __topk"
|
||||
]
|
||||
},
|
||||
"2": {
|
||||
"bp": [10, 200],
|
||||
"functions": [
|
||||
"__ef = __topk *3 + 64",
|
||||
"__ef = 8 * pow(__topk, 0.7) + 50",
|
||||
"__ef = __topk"
|
||||
]
|
||||
},
|
||||
"3": {
|
||||
"bp": [10, 300],
|
||||
"functions": [
|
||||
"__ef = 10 * pow(__topk, 0.7) + 80",
|
||||
"__ef = 10 * pow(__topk, 0.66) + 74",
|
||||
"__ef = __topk"
|
||||
]
|
||||
}
|
||||
}`
|
||||
Params.AutoIndexConfig.Parser = autoindex.NewParser()
|
||||
Params.AutoIndexConfig.Parser.InitFromJSONStr(jsonStr)
|
||||
|
||||
normalKVPairs := []*commonpb.KeyValuePair{
|
||||
{
|
||||
Key: AnnsFieldKey,
|
||||
Value: testFloatVecField,
|
||||
},
|
||||
{
|
||||
Key: TopKKey,
|
||||
Value: "10",
|
||||
},
|
||||
{
|
||||
Key: RoundDecimalKey,
|
||||
Value: "-1",
|
||||
},
|
||||
{
|
||||
Key: common.MetricTypeKey,
|
||||
Value: indexparamcheck.L2,
|
||||
},
|
||||
}
|
||||
|
||||
normalWithNilParams := append(normalKVPairs,
|
||||
&commonpb.KeyValuePair{
|
||||
Key: SearchParamsKey,
|
||||
Value: "null",
|
||||
},
|
||||
)
|
||||
|
||||
//var normalWithLevel []*commonpb.KeyValuePair
|
||||
normalWithEmptyParams := append(normalKVPairs,
|
||||
&commonpb.KeyValuePair{
|
||||
Key: SearchParamsKey,
|
||||
Value: "{}",
|
||||
},
|
||||
)
|
||||
|
||||
normalWithNormalLevel := append(normalKVPairs,
|
||||
&commonpb.KeyValuePair{
|
||||
Key: SearchParamsKey,
|
||||
Value: `{"level": 1 }`,
|
||||
},
|
||||
)
|
||||
|
||||
normalWithNormalStrLevel := append(normalKVPairs,
|
||||
&commonpb.KeyValuePair{
|
||||
Key: SearchParamsKey,
|
||||
Value: `{"level": "1" }`,
|
||||
},
|
||||
)
|
||||
|
||||
normalTests := []struct {
|
||||
description string
|
||||
params []*commonpb.KeyValuePair
|
||||
}{
|
||||
{"normal", normalKVPairs},
|
||||
{"normal_with_nil_params", normalWithNilParams},
|
||||
{"normal_with_empty_params", normalWithEmptyParams},
|
||||
{"normal_with_normal_level", normalWithNormalLevel},
|
||||
{"normal_with_normal_str_level", normalWithNormalStrLevel},
|
||||
}
|
||||
|
||||
for _, test := range normalTests {
|
||||
t.Run(test.description, func(t *testing.T) {
|
||||
_, _, err := parseSearchInfo(test.params)
|
||||
assert.NoError(t, err)
|
||||
})
|
||||
}
|
||||
|
||||
invalidWithWrongParams := append(normalKVPairs,
|
||||
&commonpb.KeyValuePair{
|
||||
Key: SearchParamsKey,
|
||||
Value: "",
|
||||
},
|
||||
)
|
||||
|
||||
invalidWithWrongLevel := append(normalKVPairs,
|
||||
&commonpb.KeyValuePair{
|
||||
Key: SearchParamsKey,
|
||||
Value: `{"level":x}`,
|
||||
},
|
||||
)
|
||||
|
||||
invalidWithWrongStrLevel := append(normalKVPairs,
|
||||
&commonpb.KeyValuePair{
|
||||
Key: SearchParamsKey,
|
||||
Value: `{"level":"x"}`,
|
||||
},
|
||||
)
|
||||
|
||||
invalidWithSmallLevel := append(normalKVPairs,
|
||||
&commonpb.KeyValuePair{
|
||||
Key: SearchParamsKey,
|
||||
Value: `{"level":-1}`,
|
||||
},
|
||||
)
|
||||
|
||||
invalidWithSmallStrLevel := append(normalKVPairs,
|
||||
&commonpb.KeyValuePair{
|
||||
Key: SearchParamsKey,
|
||||
Value: `{"level":"-1"}`,
|
||||
},
|
||||
)
|
||||
|
||||
invalidWithLargeLevel := append(normalKVPairs,
|
||||
&commonpb.KeyValuePair{
|
||||
Key: SearchParamsKey,
|
||||
Value: `{"level":100}`,
|
||||
},
|
||||
)
|
||||
|
||||
invalidWithLargeStrLevel := append(normalKVPairs,
|
||||
&commonpb.KeyValuePair{
|
||||
Key: SearchParamsKey,
|
||||
Value: `{"level":"100"}`,
|
||||
},
|
||||
)
|
||||
|
||||
invalidTests := []struct {
|
||||
description string
|
||||
params []*commonpb.KeyValuePair
|
||||
}{
|
||||
{"invalid_wrong_params", invalidWithWrongParams},
|
||||
{"invalid_wrong_level", invalidWithWrongLevel},
|
||||
{"invalid_wrong_str_level", invalidWithWrongStrLevel},
|
||||
{"invalid_with_small_level", invalidWithSmallLevel},
|
||||
{"invalid_with_small_str_level", invalidWithSmallStrLevel},
|
||||
{"invalid_with_large_level", invalidWithLargeLevel},
|
||||
{"invalid_with_large_str_level", invalidWithLargeStrLevel},
|
||||
}
|
||||
|
||||
for _, test := range invalidTests {
|
||||
t.Run(test.description, func(t *testing.T) {
|
||||
info, offset, err := parseSearchInfo(test.params)
|
||||
assert.Error(t, err)
|
||||
assert.Nil(t, info)
|
||||
assert.Zero(t, offset)
|
||||
})
|
||||
}
|
||||
|
||||
Params.AutoIndexConfig.Enable = oldEnable
|
||||
Params.AutoIndexConfig.IndexType = oldIndexType
|
||||
Params.AutoIndexConfig.IndexParams = oldIndexParams
|
||||
Params.AutoIndexConfig.SearchParamsYamlStr = oldSearchParamYamStr
|
||||
Params.AutoIndexConfig.Parser = oldParser
|
||||
|
||||
}
|
||||
|
||||
func getSearchResultData(nq, topk int64) *schemapb.SearchResultData {
|
||||
result := schemapb.SearchResultData{
|
||||
NumQueries: nq,
|
||||
|
||||
@ -22,6 +22,11 @@ import (
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
const (
|
||||
GetCollectionStatisticsTaskName = "GetCollectionStatisticsTask"
|
||||
GetPartitionStatisticsTaskName = "GetPartitionStatisticsTask"
|
||||
)
|
||||
|
||||
type getStatisticsTask struct {
|
||||
request *milvuspb.GetStatisticsRequest
|
||||
result *milvuspb.GetStatisticsResponse
|
||||
|
||||
@ -324,7 +324,7 @@ func constructSearchRequest(
|
||||
OutputFields: nil,
|
||||
SearchParams: []*commonpb.KeyValuePair{
|
||||
{
|
||||
Key: MetricTypeKey,
|
||||
Key: common.MetricTypeKey,
|
||||
Value: distance.L2,
|
||||
},
|
||||
{
|
||||
@ -1866,7 +1866,7 @@ func Test_createIndexTask_getIndexedField(t *testing.T) {
|
||||
fieldName := "test"
|
||||
|
||||
cit := &createIndexTask{
|
||||
CreateIndexRequest: &milvuspb.CreateIndexRequest{
|
||||
req: &milvuspb.CreateIndexRequest{
|
||||
CollectionName: collectionName,
|
||||
FieldName: fieldName,
|
||||
},
|
||||
@ -2061,7 +2061,7 @@ func Test_createIndexTask_PreExecute(t *testing.T) {
|
||||
fieldName := "test"
|
||||
|
||||
cit := &createIndexTask{
|
||||
CreateIndexRequest: &milvuspb.CreateIndexRequest{
|
||||
req: &milvuspb.CreateIndexRequest{
|
||||
Base: &commonpb.MsgBase{
|
||||
MsgType: commonpb.MsgType_CreateIndex,
|
||||
},
|
||||
@ -2096,7 +2096,7 @@ func Test_createIndexTask_PreExecute(t *testing.T) {
|
||||
}, nil
|
||||
})
|
||||
globalMetaCache = cache
|
||||
cit.CreateIndexRequest.ExtraParams = []*commonpb.KeyValuePair{
|
||||
cit.req.ExtraParams = []*commonpb.KeyValuePair{
|
||||
{
|
||||
Key: "index_type",
|
||||
Value: "IVF_FLAT",
|
||||
|
||||
@ -739,3 +739,55 @@ func passwordVerify(ctx context.Context, username, rawPwd string, globalMetaCach
|
||||
globalMetaCache.UpdateCredential(credInfo)
|
||||
return true
|
||||
}
|
||||
|
||||
// Support wildcard in output fields:
|
||||
// "*" - all scalar fields
|
||||
// "%" - all vector fields
|
||||
// For example, assume A and B are scalar fields and C and D are vector fields; duplicated fields are removed automatically.
|
||||
// output_fields=["*"] ==> [A,B]
|
||||
// output_fields=["%"] ==> [C,D]
|
||||
// output_fields=["*","%"] ==> [A,B,C,D]
|
||||
// output_fields=["*",A] ==> [A,B]
|
||||
// output_fields=["*",C] ==> [A,B,C]
|
||||
func translateOutputFields(outputFields []string, schema *schemapb.CollectionSchema, addPrimary bool) ([]string, error) {
|
||||
var primaryFieldName string
|
||||
scalarFieldNameMap := make(map[string]bool)
|
||||
vectorFieldNameMap := make(map[string]bool)
|
||||
resultFieldNameMap := make(map[string]bool)
|
||||
resultFieldNames := make([]string, 0)
|
||||
|
||||
for _, field := range schema.Fields {
|
||||
if field.IsPrimaryKey {
|
||||
primaryFieldName = field.Name
|
||||
}
|
||||
if field.DataType == schemapb.DataType_BinaryVector || field.DataType == schemapb.DataType_FloatVector {
|
||||
vectorFieldNameMap[field.Name] = true
|
||||
} else {
|
||||
scalarFieldNameMap[field.Name] = true
|
||||
}
|
||||
}
|
||||
|
||||
for _, outputFieldName := range outputFields {
|
||||
outputFieldName = strings.TrimSpace(outputFieldName)
|
||||
if outputFieldName == "*" {
|
||||
for fieldName := range scalarFieldNameMap {
|
||||
resultFieldNameMap[fieldName] = true
|
||||
}
|
||||
} else if outputFieldName == "%" {
|
||||
for fieldName := range vectorFieldNameMap {
|
||||
resultFieldNameMap[fieldName] = true
|
||||
}
|
||||
} else {
|
||||
resultFieldNameMap[outputFieldName] = true
|
||||
}
|
||||
}
|
||||
|
||||
if addPrimary {
|
||||
resultFieldNameMap[primaryFieldName] = true
|
||||
}
|
||||
|
||||
for fieldName := range resultFieldNameMap {
|
||||
resultFieldNames = append(resultFieldNames, fieldName)
|
||||
}
|
||||
return resultFieldNames, nil
|
||||
}
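A small sketch of the wildcard expansion described above, assuming it runs inside the proxy package; the three-field schema is illustrative.

schema := &schemapb.CollectionSchema{
        Fields: []*schemapb.FieldSchema{
                {Name: "id", IsPrimaryKey: true, DataType: schemapb.DataType_Int64},
                {Name: "age", DataType: schemapb.DataType_Int64},
                {Name: "vec", DataType: schemapb.DataType_FloatVector},
        },
}
scalars, _ := translateOutputFields([]string{"*"}, schema, false)
// scalars contains "id" and "age" (order is not guaranteed: a map is used internally)
withPK, _ := translateOutputFields([]string{"%", "age"}, schema, true)
// withPK contains "vec", "age", and the primary key "id"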
|
||||
|
||||
@ -59,7 +59,7 @@ const debugUT = false
|
||||
|
||||
const (
|
||||
dimKey = "dim"
|
||||
metricTypeKey = "metric_type"
|
||||
metricTypeKey = common.MetricTypeKey
|
||||
|
||||
defaultPKFieldName = "pk"
|
||||
defaultTopK = int64(10)
|
||||
|
||||
@ -110,12 +110,9 @@ func TestProxyClientManager_GetProxyClients(t *testing.T) {
|
||||
defer cli.Close()
|
||||
assert.Nil(t, err)
|
||||
core.etcdCli = cli
|
||||
|
||||
core.SetNewProxyClient(
|
||||
func(se *sessionutil.Session) (types.Proxy, error) {
|
||||
return nil, errors.New("failed")
|
||||
},
|
||||
)
|
||||
core.proxyCreator = func(se *sessionutil.Session) (types.Proxy, error) {
|
||||
return nil, errors.New("failed")
|
||||
}
|
||||
|
||||
pcm := newProxyClientManager(core.proxyCreator)
|
||||
|
||||
@ -138,11 +135,9 @@ func TestProxyClientManager_AddProxyClient(t *testing.T) {
|
||||
defer cli.Close()
|
||||
core.etcdCli = cli
|
||||
|
||||
core.SetNewProxyClient(
|
||||
func(se *sessionutil.Session) (types.Proxy, error) {
|
||||
return nil, errors.New("failed")
|
||||
},
|
||||
)
|
||||
core.proxyCreator = func(se *sessionutil.Session) (types.Proxy, error) {
|
||||
return nil, errors.New("failed")
|
||||
}
|
||||
|
||||
pcm := newProxyClientManager(core.proxyCreator)
|
||||
|
||||
|
||||
@ -32,6 +32,7 @@ import (
|
||||
"github.com/milvus-io/milvus/api/schemapb"
|
||||
"github.com/milvus-io/milvus/internal/allocator"
|
||||
"github.com/milvus-io/milvus/internal/common"
|
||||
pnc "github.com/milvus-io/milvus/internal/distributed/proxy/client"
|
||||
"github.com/milvus-io/milvus/internal/kv"
|
||||
etcdkv "github.com/milvus-io/milvus/internal/kv/etcd"
|
||||
"github.com/milvus-io/milvus/internal/log"
|
||||
@ -144,6 +145,20 @@ func NewCore(c context.Context, factory dependency.Factory) (*Core, error) {
|
||||
enableActiveStandBy: Params.RootCoordCfg.EnableActiveStandby,
|
||||
}
|
||||
core.UpdateStateCode(internalpb.StateCode_Abnormal)
|
||||
core.proxyCreator = func(se *sessionutil.Session) (types.Proxy, error) {
|
||||
cli, err := pnc.NewClient(c, se.Address)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := cli.Init(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := cli.Start(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return cli, nil
|
||||
}
|
||||
|
||||
return core, nil
|
||||
}
|
||||
|
||||
@ -223,10 +238,6 @@ func (c *Core) tsLoop() {
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Core) SetNewProxyClient(f func(sess *sessionutil.Session) (types.Proxy, error)) {
|
||||
c.proxyCreator = f
|
||||
}
|
||||
|
||||
func (c *Core) SetDataCoord(ctx context.Context, s types.DataCoord) error {
|
||||
if err := s.Init(); err != nil {
|
||||
return err
|
||||
|
||||
@ -29,7 +29,6 @@ import (
|
||||
"github.com/milvus-io/milvus/internal/proto/proxypb"
|
||||
"github.com/milvus-io/milvus/internal/proto/querypb"
|
||||
"github.com/milvus-io/milvus/internal/proto/rootcoordpb"
|
||||
"github.com/milvus-io/milvus/internal/util/sessionutil"
|
||||
)
|
||||
|
||||
// TimeTickProvider is the interface all services implement
|
||||
@ -785,9 +784,6 @@ type RootCoordComponent interface {
|
||||
// Always return nil.
|
||||
SetQueryCoord(queryCoord QueryCoord) error
|
||||
|
||||
// SetNewProxyClient set Proxy client creator func for RootCoord
|
||||
SetNewProxyClient(func(sess *sessionutil.Session) (Proxy, error))
|
||||
|
||||
// GetMetrics notifies RootCoordComponent to collect metrics for specified component
|
||||
GetMetrics(ctx context.Context, req *milvuspb.GetMetricsRequest) (*milvuspb.GetMetricsResponse, error)
|
||||
}
|
||||
|
||||
114
internal/util/autoindex/bigdata_index.go
Normal file
@ -0,0 +1,114 @@
|
||||
// Licensed to the LF AI & Data foundation under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package autoindex
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type BigDataIndexExtraParams struct {
|
||||
PGCodeBudgetGBRatio float64
|
||||
BuildNumThreadsRatio float64
|
||||
SearchCacheBudgetGBRatio float64
|
||||
LoadNumThreadRatio float64
|
||||
BeamWidthRatio float64
|
||||
}
|
||||
|
||||
const (
|
||||
BuildRatioKey = "build_ratio"
|
||||
PrepareRatioKey = "prepare_ratio"
|
||||
BeamWidthRatioKey = "beamwidth_ratio"
|
||||
)
|
||||
|
||||
func NewBigDataIndexExtraParams() *BigDataIndexExtraParams {
|
||||
ret := &BigDataIndexExtraParams{
|
||||
PGCodeBudgetGBRatio: 0.125,
|
||||
BuildNumThreadsRatio: 1.0,
|
||||
SearchCacheBudgetGBRatio: 0.125,
|
||||
LoadNumThreadRatio: 8.0,
|
||||
BeamWidthRatio: 4.0,
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
func NewBigDataExtraParamsFromJSON(jsonStr string) *BigDataIndexExtraParams {
|
||||
buffer := make(map[string]string)
|
||||
err := json.Unmarshal([]byte(jsonStr), &buffer)
|
||||
if err != nil {
|
||||
return NewBigDataIndexExtraParams()
|
||||
}
|
||||
return NewBigDataExtraParamsFromMap(buffer)
|
||||
}
|
||||
|
||||
func NewBigDataExtraParamsFromMap(value map[string]string) *BigDataIndexExtraParams {
|
||||
ret := &BigDataIndexExtraParams{}
|
||||
var err error
|
||||
buildRatio, ok := value[BuildRatioKey]
|
||||
if !ok {
|
||||
ret.PGCodeBudgetGBRatio = 0.125
|
||||
ret.BuildNumThreadsRatio = 1.0
|
||||
} else {
|
||||
valueMap1 := make(map[string]float64)
|
||||
err = json.Unmarshal([]byte(buildRatio), &valueMap1)
|
||||
if err != nil {
|
||||
ret.PGCodeBudgetGBRatio = 0.125
|
||||
ret.BuildNumThreadsRatio = 1.0
|
||||
} else {
|
||||
ret.PGCodeBudgetGBRatio = valueMap1["pg_code_budget_gb"]
|
||||
ret.BuildNumThreadsRatio = valueMap1["num_threads"]
|
||||
}
|
||||
}
|
||||
|
||||
prepareRatio, ok := value[PrepareRatioKey]
|
||||
if !ok {
|
||||
ret.SearchCacheBudgetGBRatio = 0.125
|
||||
ret.LoadNumThreadRatio = 8
|
||||
} else {
|
||||
valueMap2 := make(map[string]float64)
|
||||
err = json.Unmarshal([]byte(prepareRatio), &valueMap2)
|
||||
if err != nil {
|
||||
ret.SearchCacheBudgetGBRatio = 0.125
|
||||
ret.LoadNumThreadRatio = 8
|
||||
} else {
|
||||
SearchCacheBudgetGBRatio, ok := valueMap2["search_cache_budget_gb"]
|
||||
if !ok {
|
||||
ret.SearchCacheBudgetGBRatio = 0.125
|
||||
} else {
|
||||
ret.SearchCacheBudgetGBRatio = SearchCacheBudgetGBRatio
|
||||
}
|
||||
LoadNumThreadRatio, ok := valueMap2["num_threads"]
|
||||
if !ok {
|
||||
ret.LoadNumThreadRatio = 8
|
||||
} else {
|
||||
ret.LoadNumThreadRatio = LoadNumThreadRatio
|
||||
}
|
||||
}
|
||||
}
|
||||
beamWidthRatioStr, ok := value[BeamWidthRatioKey]
|
||||
if !ok {
|
||||
ret.BeamWidthRatio = 4.0
|
||||
} else {
|
||||
beamWidthRatio, err := strconv.ParseFloat(beamWidthRatioStr, 64)
|
||||
if err != nil {
|
||||
ret.BeamWidthRatio = 4.0
|
||||
} else {
|
||||
ret.BeamWidthRatio = beamWidthRatio
|
||||
}
|
||||
}
|
||||
return ret
|
||||
}
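An illustrative sketch of how partial configs and defaults combine here, assuming it runs inside the autoindex package; the ratio values are made up.

p := NewBigDataExtraParamsFromJSON(`{
        "prepare_ratio": "{\"search_cache_budget_gb\": 0.25, \"num_threads\": 4}",
        "beamwidth_ratio": "8.0"
}`)
// build_ratio is absent, so the build-side fields keep their defaults:
//   p.PGCodeBudgetGBRatio == 0.125, p.BuildNumThreadsRatio == 1.0
// The supplied keys override the prepare/search-side defaults:
//   p.SearchCacheBudgetGBRatio == 0.25, p.LoadNumThreadRatio == 4, p.BeamWidthRatio == 8.0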
|
||||
132
internal/util/autoindex/bigdata_index_test.go
Normal file
@ -0,0 +1,132 @@
|
||||
// Licensed to the LF AI & Data foundation under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package autoindex
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestBigDataIndex_parse(t *testing.T) {
|
||||
t.Run("parse normal", func(t *testing.T) {
|
||||
mapString := make(map[string]string)
|
||||
mapString[BuildRatioKey] = "{\"pg_code_budget_gb\": 0.125, \"num_threads\": 1}"
|
||||
mapString[PrepareRatioKey] = "{\"search_cache_budget_gb\": 0.225, \"num_threads\": 8}"
|
||||
extraParams := NewBigDataExtraParamsFromMap(mapString)
|
||||
assert.Equal(t, 1.0, extraParams.BuildNumThreadsRatio)
|
||||
assert.Equal(t, 8.0, extraParams.LoadNumThreadRatio)
|
||||
assert.Equal(t, 0.125, extraParams.PGCodeBudgetGBRatio)
|
||||
assert.Equal(t, 0.225, extraParams.SearchCacheBudgetGBRatio)
|
||||
})
|
||||
|
||||
t.Run("parse with partial", func(t *testing.T) {
|
||||
mapString := make(map[string]string)
|
||||
mapString[PrepareRatioKey] = "{\"search_cache_budget_gb\": 0.225, \"num_threads\": 8}"
|
||||
extraParams := NewBigDataExtraParamsFromMap(mapString)
|
||||
assert.Equal(t, 1.0, extraParams.BuildNumThreadsRatio)
|
||||
assert.Equal(t, 8.0, extraParams.LoadNumThreadRatio)
|
||||
assert.Equal(t, 0.125, extraParams.PGCodeBudgetGBRatio)
|
||||
assert.Equal(t, 0.225, extraParams.SearchCacheBudgetGBRatio)
|
||||
})
|
||||
|
||||
t.Run("parse with empty", func(t *testing.T) {
|
||||
mapString := make(map[string]string)
|
||||
extraParams := NewBigDataExtraParamsFromMap(mapString)
|
||||
assert.Equal(t, 1.0, extraParams.BuildNumThreadsRatio)
|
||||
assert.Equal(t, 8.0, extraParams.LoadNumThreadRatio)
|
||||
assert.Equal(t, 0.125, extraParams.PGCodeBudgetGBRatio)
|
||||
assert.Equal(t, 0.125, extraParams.SearchCacheBudgetGBRatio)
|
||||
})
|
||||
|
||||
t.Run("parse with nil", func(t *testing.T) {
|
||||
extraParams := NewBigDataExtraParamsFromMap(nil)
|
||||
assert.Equal(t, 1.0, extraParams.BuildNumThreadsRatio)
|
||||
assert.Equal(t, 8.0, extraParams.LoadNumThreadRatio)
|
||||
assert.Equal(t, 0.125, extraParams.PGCodeBudgetGBRatio)
|
||||
assert.Equal(t, 0.125, extraParams.SearchCacheBudgetGBRatio)
|
||||
})
|
||||
|
||||
t.Run("new from json normal", func(t *testing.T) {
|
||||
jsonStr := `
|
||||
{
|
||||
"build_ratio": "{\"pg_code_budget_gb\": 0.125, \"num_threads\": 1}",
|
||||
"prepare_ratio": "{\"search_cache_budget_gb\": 0.225, \"num_threads\": 8}",
|
||||
"beamwidth_ratio": "8.0"
|
||||
}
|
||||
`
|
||||
extraParams := NewBigDataExtraParamsFromJSON(jsonStr)
|
||||
assert.Equal(t, 1.0, extraParams.BuildNumThreadsRatio)
|
||||
assert.Equal(t, 8.0, extraParams.LoadNumThreadRatio)
|
||||
assert.Equal(t, 0.125, extraParams.PGCodeBudgetGBRatio)
|
||||
assert.Equal(t, 0.225, extraParams.SearchCacheBudgetGBRatio)
|
||||
assert.Equal(t, 8.0, extraParams.BeamWidthRatio)
|
||||
})
|
||||
|
||||
t.Run("new from json partial", func(t *testing.T) {
|
||||
jsonStr := `
|
||||
{
|
||||
"build_ratio": "{\"pg_code_budget_gb\": 0.125, \"num_threads\": 1}"
|
||||
}
|
||||
`
|
||||
extraParams := NewBigDataExtraParamsFromJSON(jsonStr)
|
||||
assert.Equal(t, 1.0, extraParams.BuildNumThreadsRatio)
|
||||
assert.Equal(t, 8.0, extraParams.LoadNumThreadRatio)
|
||||
assert.Equal(t, 0.125, extraParams.PGCodeBudgetGBRatio)
|
||||
assert.Equal(t, 0.125, extraParams.SearchCacheBudgetGBRatio)
|
||||
assert.Equal(t, 4.0, extraParams.BeamWidthRatio)
|
||||
})
|
||||
|
||||
t.Run("new from json empty", func(t *testing.T) {
|
||||
jsonStr := `
|
||||
{
|
||||
}
|
||||
`
|
||||
extraParams := NewBigDataExtraParamsFromJSON(jsonStr)
|
||||
assert.Equal(t, 1.0, extraParams.BuildNumThreadsRatio)
|
||||
assert.Equal(t, 8.0, extraParams.LoadNumThreadRatio)
|
||||
assert.Equal(t, 0.125, extraParams.PGCodeBudgetGBRatio)
|
||||
assert.Equal(t, 0.125, extraParams.SearchCacheBudgetGBRatio)
|
||||
assert.Equal(t, 4.0, extraParams.BeamWidthRatio)
|
||||
})
|
||||
|
||||
t.Run("new from json invalid1", func(t *testing.T) {
|
||||
jsonStr := `
|
||||
{ x
|
||||
}
|
||||
`
|
||||
extraParams := NewBigDataExtraParamsFromJSON(jsonStr)
|
||||
assert.Equal(t, 1.0, extraParams.BuildNumThreadsRatio)
|
||||
assert.Equal(t, 8.0, extraParams.LoadNumThreadRatio)
|
||||
assert.Equal(t, 0.125, extraParams.PGCodeBudgetGBRatio)
|
||||
assert.Equal(t, 0.125, extraParams.SearchCacheBudgetGBRatio)
|
||||
assert.Equal(t, 4.0, extraParams.BeamWidthRatio)
|
||||
})
|
||||
|
||||
t.Run("new from json invalid1", func(t *testing.T) {
|
||||
jsonStr := `
|
||||
""
|
||||
`
|
||||
extraParams := NewBigDataExtraParamsFromJSON(jsonStr)
|
||||
assert.Equal(t, 1.0, extraParams.BuildNumThreadsRatio)
|
||||
assert.Equal(t, 8.0, extraParams.LoadNumThreadRatio)
|
||||
assert.Equal(t, 0.125, extraParams.PGCodeBudgetGBRatio)
|
||||
assert.Equal(t, 0.125, extraParams.SearchCacheBudgetGBRatio)
|
||||
assert.Equal(t, 4.0, extraParams.BeamWidthRatio)
|
||||
})
|
||||
|
||||
}
|
||||
86
internal/util/autoindex/function.go
Normal file
@ -0,0 +1,86 @@
|
||||
// Licensed to the LF AI & Data foundation under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package autoindex
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
//"strconv"
|
||||
|
||||
"github.com/milvus-io/milvus/api/commonpb"
|
||||
"github.com/sandertv/go-formula/v2"
|
||||
)
|
||||
|
||||
const (
|
||||
TopKKey = "topk"
|
||||
)
|
||||
|
||||
var _ calculateFunc = (*function)(nil)
|
||||
|
||||
type calculateFunc interface {
|
||||
calculate(params []*commonpb.KeyValuePair) (map[string]interface{}, error)
|
||||
GetInputKey() string
|
||||
GetOutputKey() string
|
||||
}
|
||||
|
||||
type function struct {
|
||||
f *formula.Formula
|
||||
input string
|
||||
inputKey string
|
||||
outputKey string
|
||||
}
|
||||
|
||||
func newFunction(stmt string) (*function, error) {
|
||||
input, output, expr, err := parseAssignment(stmt)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("parse function failed, wrong format:%w", err)
|
||||
}
|
||||
f, err := formula.New(expr)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("parse function failed, wrong format:%w", err)
|
||||
}
|
||||
ret := &function{}
|
||||
ret.f = f
|
||||
ret.input = input
|
||||
ret.inputKey = strings.TrimPrefix(input, VariablePrefix)
|
||||
ret.outputKey = strings.TrimPrefix(output, VariablePrefix)
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (f *function) calculate(params []*commonpb.KeyValuePair) (map[string]interface{}, error) {
|
||||
inputValue, err := getInt64FromParams(params, f.inputKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
inputVar := formula.Var(f.input, inputValue)
|
||||
outputValue, err := f.f.Eval(inputVar)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("calculate failed:%w", err)
|
||||
}
|
||||
ret := make(map[string]interface{})
|
||||
ret[f.outputKey] = int64(outputValue)
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (f *function) GetInputKey() string {
|
||||
return f.inputKey
|
||||
}
|
||||
|
||||
func (f *function) GetOutputKey() string {
|
||||
return f.outputKey
|
||||
}
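A minimal sketch of newFunction and calculate, assuming it runs inside the autoindex package (these identifiers are unexported); the formula is illustrative.

f, err := newFunction("__ef = __topk * 2 + 31")
if err != nil {
        panic(err)
}
// The "__" prefix marks variables; the lookup/output keys drop the prefix:
// f.GetInputKey() == "topk", f.GetOutputKey() == "ef"
out, _ := f.calculate([]*commonpb.KeyValuePair{{Key: "topk", Value: "10"}})
// out["ef"] == int64(10*2 + 31) == int64(51)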
|
||||
118
internal/util/autoindex/function_test.go
Normal file
@ -0,0 +1,118 @@
|
||||
// Licensed to the LF AI & Data foundation under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package autoindex
|
||||
|
||||
import (
|
||||
"math"
|
||||
"testing"
|
||||
|
||||
"github.com/milvus-io/milvus/api/commonpb"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestAutoIndexFunctionParse(t *testing.T) {
|
||||
var f calculateFunc
|
||||
var err error
|
||||
|
||||
validFuncStrs := []string{
|
||||
" __output = __input * 2 + 1",
|
||||
" __output = __input",
|
||||
" __output = 1.5 * pow(__input, 1.2) + 40",
|
||||
}
|
||||
for _, funcStr := range validFuncStrs {
|
||||
f, err = newFunction(funcStr)
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, f)
|
||||
assert.Equal(t, "input", f.GetInputKey())
|
||||
assert.Equal(t, "output", f.GetOutputKey())
|
||||
}
|
||||
|
||||
invalidFuncStrs := []string{
|
||||
"",
|
||||
"{",
|
||||
" output = __input * 2 + 1",
|
||||
" __output __input * 2 + 1",
|
||||
" _output = __input * 2 + 1",
|
||||
}
|
||||
|
||||
for _, funcStr := range invalidFuncStrs {
|
||||
f, err = newFunction(funcStr)
|
||||
assert.Error(t, err)
|
||||
assert.Nil(t, f)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAutoIndexFunctionCalculate(t *testing.T) {
|
||||
var params []*commonpb.KeyValuePair
|
||||
inputKey := "input"
|
||||
outputKey := "output"
|
||||
params = append(params, &commonpb.KeyValuePair{
|
||||
Key: inputKey,
|
||||
Value: "10",
|
||||
})
|
||||
|
||||
var f calculateFunc
|
||||
var err error
|
||||
|
||||
t.Run("function1", func(t *testing.T) {
|
||||
funcStr := "__output = 4 * __input + 5"
|
||||
f, err = newFunction(funcStr)
|
||||
assert.NoError(t, err)
|
||||
tValue, err := f.calculate(params)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, int64(4*10+5), tValue[outputKey].(int64))
|
||||
})
|
||||
|
||||
t.Run("function2", func(t *testing.T) {
|
||||
funcStr := "__output = 4 * pow(__input,2) + 6"
|
||||
f, err = newFunction(funcStr)
|
||||
assert.NoError(t, err)
|
||||
tValue, err := f.calculate(params)
|
||||
assert.NoError(t, err)
|
||||
targetV := int64(4*math.Pow(10, 2) + 6)
|
||||
assert.Equal(t, targetV, tValue[outputKey].(int64))
|
||||
})
|
||||
|
||||
t.Run("function3", func(t *testing.T) {
|
||||
funcStr := "__output = __input"
|
||||
f, err = newFunction(funcStr)
|
||||
assert.NoError(t, err)
|
||||
tValue, err := f.calculate(params)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, int64(10), tValue[outputKey].(int64))
|
||||
})
|
||||
|
||||
t.Run("function4", func(t *testing.T) {
|
||||
funcStr := "__output_2 = 4 * pow(__input, 2) + 6"
|
||||
f, err = newFunction(funcStr)
|
||||
assert.NoError(t, err)
|
||||
tValue, err := f.calculate(params)
|
||||
assert.NoError(t, err)
|
||||
targetV := int64(4*math.Pow(10, 2) + 6)
|
||||
assert.Equal(t, targetV, tValue["output_2"].(int64))
|
||||
})
|
||||
|
||||
t.Run("function5", func(t *testing.T) {
|
||||
funcStr := "__output_3 = 1.5 * exp(__input*0.1) + 3"
|
||||
f, err = newFunction(funcStr)
|
||||
assert.NoError(t, err)
|
||||
tValue, err := f.calculate(params)
|
||||
assert.NoError(t, err)
|
||||
targetV := int64(1.5*math.Exp(10*0.1) + 3)
|
||||
assert.Equal(t, targetV, tValue["output_3"].(int64))
|
||||
})
|
||||
}
|
||||
164
internal/util/autoindex/method.go
Normal file
@ -0,0 +1,164 @@
|
||||
// Licensed to the LF AI & Data foundation under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package autoindex
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/milvus-io/milvus/api/commonpb"
|
||||
)
|
||||
|
||||
var _ Calculator = (*methodPieceWise)(nil)
|
||||
var _ Calculator = (*methodNormal)(nil)
|
||||
|
||||
type Calculator interface {
|
||||
Calculate(params []*commonpb.KeyValuePair) (map[string]interface{}, error)
|
||||
}
|
||||
|
||||
type methodPieceWise struct {
|
||||
bp []float64
|
||||
functions []calculateFunc
|
||||
bpKey string
|
||||
}
|
||||
|
||||
func (m *methodPieceWise) Calculate(params []*commonpb.KeyValuePair) (map[string]interface{}, error) {
|
||||
bpValue, err := getInt64FromParams(params, m.bpKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
idx := 0
|
||||
for _, p := range m.bp {
|
||||
if bpValue < int64(p) {
|
||||
break
|
||||
}
|
||||
idx++
|
||||
}
|
||||
if idx >= len(m.functions) {
|
||||
// cannot happen: bp and functions sizes are validated in newMethodPieceWiseFromMap
|
||||
return nil, fmt.Errorf("calculate failed, methodPeiceWise functions size not match")
|
||||
}
|
||||
f := m.functions[idx]
|
||||
retMap, err := f.calculate(params)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return retMap, nil
|
||||
}
|
||||
|
||||
func newMethodPieceWise(jsonStr string) (*methodPieceWise, error) {
|
||||
valueMap := make(map[string]interface{})
|
||||
err := json.Unmarshal([]byte(jsonStr), &valueMap)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("newMethodPieceWise failed:%w", err)
|
||||
}
|
||||
return newMethodPieceWiseFromMap(valueMap)
|
||||
}
|
||||
|
||||
func newMethodPieceWiseFromMap(values map[string]interface{}) (*methodPieceWise, error) {
|
||||
var err error
|
||||
bpValue, ok := values["bp"]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("parse piecewise function failed, bp not specified")
|
||||
}
|
||||
bpSlice, ok := bpValue.([]interface{})
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("parse piecewise bp failed, bp in wrong format")
|
||||
}
|
||||
var bpValues []float64
|
||||
for _, bpV := range bpSlice {
|
||||
bpFloat, ok := bpV.(float64)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("parse piecewise bp failed, bp in wrong format")
|
||||
}
|
||||
bpValues = append(bpValues, bpFloat)
|
||||
}
|
||||
|
||||
funcs, ok := values["functions"]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("parse piecewise function failed, functions not specified")
|
||||
}
|
||||
funcStrSlice, ok := funcs.([]interface{})
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("parse piecewise function failed, functions in wrong format")
|
||||
}
|
||||
var functions []calculateFunc
|
||||
for _, funcValue := range funcStrSlice {
|
||||
funcStr, ok := funcValue.(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("parse piecewise function failed, function in wrong format")
|
||||
}
|
||||
var f calculateFunc
|
||||
f, err = newFunction(funcStr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
functions = append(functions, f)
|
||||
}
|
||||
|
||||
if len(bpValues)+1 != len(functions) {
|
||||
return nil, fmt.Errorf("parse piecewise function failed, function size not match to bp size")
|
||||
}
|
||||
ret := &methodPieceWise{
|
||||
bp: bpValues,
|
||||
functions: functions,
|
||||
bpKey: functions[0].GetInputKey(),
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
type methodNormal struct {
|
||||
function calculateFunc
|
||||
}
|
||||
|
||||
func (m *methodNormal) Calculate(params []*commonpb.KeyValuePair) (map[string]interface{}, error) {
|
||||
retMap, err := m.function.calculate(params)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return retMap, nil
|
||||
}
|
||||
|
||||
func newMethodNormal(jsonStr string) (*methodNormal, error) {
|
||||
valueMap := make(map[string]interface{})
|
||||
err := json.Unmarshal([]byte(jsonStr), &valueMap)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("newMethodNormal failed:%w", err)
|
||||
}
|
||||
return newMethodNormalFromMap(valueMap)
|
||||
}
|
||||
|
||||
func newMethodNormalFromMap(values map[string]interface{}) (*methodNormal, error) {
|
||||
var err error
|
||||
funcValue, ok := values["function"]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("parse normal method failed, function not specified")
|
||||
}
|
||||
funcStr, ok := funcValue.(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("parse normal method failed, function in wrong format")
|
||||
}
|
||||
var f calculateFunc
|
||||
f, err = newFunction(funcStr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret := &methodNormal{
|
||||
function: f,
|
||||
}
|
||||
return ret, nil
|
||||
}
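A sketch of how a breakpoint value is routed, assuming it runs inside the autoindex package; note that the comparison above is strict (bpValue < p), so a topk equal to a breakpoint falls into the next segment. The formulas are illustrative.

m, err := newMethodPieceWise(`{
        "bp": [10, 200],
        "functions": [
                "__ef = __topk + 4",
                "__ef = __topk * 3 + 4",
                "__ef = __topk"
        ]
}`)
if err != nil {
        panic(err)
}
out, _ := m.Calculate([]*commonpb.KeyValuePair{{Key: "topk", Value: "10"}})
// topk == 10 is not < 10, so the second function is chosen: out["ef"] == int64(34)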
|
||||
154
internal/util/autoindex/method_test.go
Normal file
@ -0,0 +1,154 @@
|
||||
// Licensed to the LF AI & Data foundation under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package autoindex
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"math"
|
||||
"testing"
|
||||
|
||||
"github.com/milvus-io/milvus/api/commonpb"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestAutoIndexMethodParse(t *testing.T) {
|
||||
var err error
|
||||
json1 := `{
|
||||
"function": "__output = __input * 10 + 5"
|
||||
}`
|
||||
m1, err := newMethodNormal(json1)
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, m1)
|
||||
assert.NotNil(t, m1.function)
|
||||
|
||||
json2 := `{
|
||||
"bp": [10, 200],
|
||||
"functions": [
|
||||
"__output = __input",
|
||||
"__output = 10 * __input + 5",
|
||||
"__output = pow(__input, 1)"
|
||||
]
|
||||
}`
|
||||
m2, err := newMethodPieceWise(json2)
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, m2)
|
||||
assert.NotNil(t, m2.functions)
|
||||
assert.Equal(t, 2, len(m2.bp))
|
||||
assert.Equal(t, 3, len(m2.functions))
|
||||
assert.Equal(t, "input", m2.bpKey)
|
||||
|
||||
invalid1JSONS := []string{
|
||||
"",
|
||||
`{}`,
|
||||
`{"": 1, "func": ""}`,
|
||||
`{"bp": [1,2], "function": ""}`,
|
||||
`{"bp": 1, "function": "xxx"}`,
|
||||
}
|
||||
|
||||
invalid2JSONS := []string{
|
||||
"",
|
||||
`{}`,
|
||||
`{"bp": 2}`,
|
||||
`{"bp": [2], "func": ""}`,
|
||||
`{"bp": [1,2], "function": ""}`,
|
||||
`{"functions": "xxx"}`,
|
||||
`{
|
||||
"bp": [10, 200],
|
||||
"functions": [
|
||||
"__output = __input",
|
||||
"__output = 10 * __input + 5",
|
||||
]
|
||||
}`,
|
||||
}
|
||||
|
||||
var m Calculator
|
||||
|
||||
for _, jsonStr := range invalid1JSONS {
|
||||
m, err = newMethodNormal(jsonStr)
|
||||
assert.NotNil(t, err)
|
||||
assert.Nil(t, m)
|
||||
}
|
||||
|
||||
for _, jsonStr := range invalid2JSONS {
|
||||
m, err = newMethodPieceWise(jsonStr)
|
||||
assert.NotNil(t, err)
|
||||
assert.Nil(t, m)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAutoIndexMethodCalculate(t *testing.T) {
|
||||
var err error
|
||||
|
||||
inputKey := "input"
|
||||
outputKey := "output"
|
||||
|
||||
var params []*commonpb.KeyValuePair
|
||||
params = append(params, &commonpb.KeyValuePair{
|
||||
Key: inputKey,
|
||||
Value: "10",
|
||||
})
|
||||
var method Calculator
|
||||
|
||||
t.Run("methodNormal", func(t *testing.T) {
|
||||
jsonStr := `{
|
||||
"function": "__output = 3 * pow(__input, 2) + 5"
|
||||
}`
|
||||
method, err = newMethodNormal(jsonStr)
|
||||
assert.NoError(t, err)
|
||||
|
||||
targetV := int64(3*math.Pow(10, 2) + 5)
|
||||
expMap := make(map[string]interface{})
|
||||
expMap[outputKey] = targetV
|
||||
expJSON, err := json.Marshal(expMap)
|
||||
expJSONStr := string(expJSON)
|
||||
assert.NoError(t, err)
|
||||
|
||||
ret, err := method.Calculate(params)
|
||||
assert.NoError(t, err)
|
||||
targetJSON, err := json.Marshal(ret)
|
||||
assert.NoError(t, err)
|
||||
targetJSONStr := string(targetJSON)
|
||||
assert.Equal(t, expJSONStr, targetJSONStr)
|
||||
})
|
||||
|
||||
t.Run("methodPieceWise", func(t *testing.T) {
|
||||
jsonStr := `{
|
||||
"bp": [10, 50],
|
||||
"functions": [
|
||||
"__output = __input",
|
||||
"__output = 3.0*pow(__input,2) + 5",
|
||||
"__output = 10 * __input + 5"
|
||||
]
|
||||
}`
|
||||
method, err = newMethodPieceWise(jsonStr)
|
||||
assert.NoError(t, err)
|
||||
|
||||
targetV := int64(3*math.Pow(10, 2) + 5)
|
||||
expMap := make(map[string]interface{})
|
||||
expMap[outputKey] = targetV
|
||||
expJSON, err := json.Marshal(expMap)
|
||||
expJSONStr := string(expJSON)
|
||||
assert.NoError(t, err)
|
||||
|
||||
ret, err := method.Calculate(params)
|
||||
assert.NoError(t, err)
|
||||
targetJSON, err := json.Marshal(ret)
|
||||
assert.NoError(t, err)
|
||||
targetJSONStr := string(targetJSON)
|
||||
assert.Equal(t, expJSONStr, targetJSONStr)
|
||||
})
|
||||
}
|
||||
87
internal/util/autoindex/parser.go
Normal file
@ -0,0 +1,87 @@
|
||||
// Licensed to the LF AI & Data foundation under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package autoindex
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
"sync"
|
||||
|
||||
"encoding/json"
|
||||
)
|
||||
|
||||
type Parser struct {
|
||||
rw sync.RWMutex
|
||||
methods sync.Map // map of level to Calculator
|
||||
}
|
||||
|
||||
func NewParser() *Parser {
|
||||
return &Parser{
|
||||
methods: sync.Map{},
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Parser) InitFromJSONStr(value string) error {
|
||||
valueMap := make(map[string]interface{})
|
||||
err := json.Unmarshal([]byte(value), &valueMap)
|
||||
if err != nil {
|
||||
return fmt.Errorf("init autoindex parser failed:%w", err)
|
||||
}
|
||||
return p.InitFromMap(valueMap)
|
||||
}
|
||||
|
||||
func (p *Parser) InitFromMap(values map[string]interface{}) error {
|
||||
p.rw.Lock()
|
||||
defer p.rw.Unlock()
|
||||
p.methods = sync.Map{}
|
||||
var err error
|
||||
var cnt int
|
||||
for levelStr, value := range values {
|
||||
valueMap, ok := value.(map[string]interface{})
|
||||
if !ok {
|
||||
return fmt.Errorf("parse params failed, wrong format")
|
||||
}
|
||||
var level int
|
||||
level, err = strconv.Atoi(levelStr)
|
||||
if err != nil {
|
||||
return fmt.Errorf("parse level failed, woring format")
|
||||
}
|
||||
|
||||
var method Calculator
|
||||
method, err = newMethodNormalFromMap(valueMap)
|
||||
if err != nil {
|
||||
method, err = newMethodPieceWiseFromMap(valueMap)
|
||||
}
|
||||
if err != nil {
|
||||
return fmt.Errorf("parse method failed %w", err)
|
||||
}
|
||||
p.methods.Store(level, method)
|
||||
cnt++
|
||||
}
|
||||
if cnt == 0 {
|
||||
return fmt.Errorf("parse method failed: empty")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *Parser) GetMethodByLevel(level int) (Calculator, bool) {
|
||||
m, ok := p.methods.Load(level)
|
||||
if !ok {
|
||||
return nil, false
|
||||
}
|
||||
return m.(Calculator), true
|
||||
}
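A usage sketch tying the parser, methods, and functions together, assuming it runs inside the autoindex package; the two-level config is illustrative.

p := NewParser()
if err := p.InitFromJSONStr(`{
        "1": {"function": "__ef = __topk * 2 + 30"},
        "2": {"bp": [50], "functions": ["__ef = __topk * 3", "__ef = __topk"]}
}`); err != nil {
        panic(err)
}
if calc, ok := p.GetMethodByLevel(1); ok {
        out, _ := calc.Calculate([]*commonpb.KeyValuePair{{Key: "topk", Value: "100"}})
        // out["ef"] == int64(100*2 + 30) == int64(230)
}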
|
||||
143
internal/util/autoindex/parser_test.go
Normal file
@ -0,0 +1,143 @@
|
||||
package autoindex
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"math"
|
||||
"testing"
|
||||
|
||||
"github.com/milvus-io/milvus/api/commonpb"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestAutoIndexParser_Parse(t *testing.T) {
|
||||
jsonStr := `
|
||||
{
|
||||
"1": {
|
||||
"function": "__output = 3*__input + 4"
|
||||
},
|
||||
"2": {
|
||||
"bp": [10, 200],
|
||||
"functions": [
|
||||
"__output = __input + 4",
|
||||
"__output = 3*__input + 4",
|
||||
"__output = pow(__input, 2) + 4"
|
||||
]
|
||||
},
|
||||
"3": {
|
||||
"bp": [10, 300],
|
||||
"functions": [
|
||||
"__output = __input + 4",
|
||||
"__output = 2*__input + 3",
|
||||
"__output = pow(__input, 1.2) + 4"
|
||||
]
|
||||
}
|
||||
}`
|
||||
parser := NewParser()
|
||||
err := parser.InitFromJSONStr(jsonStr)
|
||||
assert.NoError(t, err)
|
||||
|
||||
invalid1JSONS := []string{
|
||||
`{}`,
|
||||
`{"1":xxx}`,
|
||||
`{
|
||||
"1": {
|
||||
"func": "{"
|
||||
},
|
||||
"2": x
|
||||
}`,
|
||||
`{
|
||||
"1": {
|
||||
"function": "{\"funcID\": 3, \"cof1\": 3,\"cof2\": 4,\"cof3\": 5}"
|
||||
},
|
||||
"2": x
|
||||
}`,
|
||||
`{"": 1}`,
|
||||
`{"": 1, "func": ""}`,
|
||||
`{"": 1, "function": ""}`,
|
||||
`{1, "function": "xxx"}`,
|
||||
}
|
||||
|
||||
for _, jsonStr := range invalid1JSONS {
|
||||
err := parser.InitFromJSONStr(jsonStr)
|
||||
assert.NotNil(t, err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAutoIndexParser_GetMethodByLevel(t *testing.T) {
|
||||
jsonStr := `
|
||||
{
|
||||
"1": {
|
||||
"function": "__output = __input"
|
||||
},
|
||||
"2": {
|
||||
"bp": [10, 50],
|
||||
"functions": [
|
||||
"__output = __input",
|
||||
"__output = 3.0*pow(__input,2) + 5",
|
||||
"__output = 10 * __input + 5"
|
||||
]
|
||||
},
|
||||
"3": {
|
||||
"bp": [10, 300],
|
||||
"functions": [
|
||||
"__output = 3.0*pow(__input,2) + 5",
|
||||
"__output = 10 * __input + 5",
|
||||
"__output = __input"
|
||||
]
|
||||
}
|
||||
}`
|
||||
|
||||
var err error
|
||||
parser := NewParser()
|
||||
err = parser.InitFromJSONStr(jsonStr)
|
||||
assert.NoError(t, err)
|
||||
|
||||
inputKey := "input"
|
||||
outputKey := "output"
|
||||
|
||||
var params []*commonpb.KeyValuePair
|
||||
params = append(params, &commonpb.KeyValuePair{
|
||||
Key: inputKey,
|
||||
Value: "10",
|
||||
})
|
||||
|
||||
assertValueMapEqual := func(source, target map[string]interface{}) {
|
||||
expJSON, err := json.Marshal(source)
|
||||
expJSONStr := string(expJSON)
|
||||
assert.NoError(t, err)
|
||||
|
||||
targetJSON, err := json.Marshal(target)
|
||||
assert.NoError(t, err)
|
||||
targetJSONStr := string(targetJSON)
|
||||
assert.Equal(t, expJSONStr, targetJSONStr)
|
||||
}
|
||||
|
||||
normalTargetValues := []int64{
|
||||
10,
|
||||
int64(3*math.Pow(10, 2) + 5),
|
||||
int64(10*10 + 5),
|
||||
}
|
||||
normalLevels := []int{1, 2, 3}
|
||||
|
||||
for i, l := range normalLevels {
|
||||
targetV := normalTargetValues[i]
|
||||
expMap := make(map[string]interface{})
|
||||
expMap[outputKey] = targetV
|
||||
|
||||
m, exist := parser.GetMethodByLevel(l)
|
||||
assert.NotNil(t, m)
|
||||
assert.True(t, exist)
|
||||
ret, err := m.Calculate(params)
|
||||
assert.NoError(t, err)
|
||||
assertValueMapEqual(expMap, ret)
|
||||
}
|
||||
|
||||
invalidLevels := []int{-1, 0, 4}
|
||||
|
||||
for _, l := range invalidLevels {
|
||||
m, exist := parser.GetMethodByLevel(l)
|
||||
assert.Nil(t, m)
|
||||
assert.False(t, exist)
|
||||
|
||||
}
|
||||
}
|
||||
113
internal/util/autoindex/util.go
Normal file
@ -0,0 +1,113 @@
// Licensed to the LF AI & Data foundation under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package autoindex

import (
	"fmt"
	"go/ast"
	"go/parser"
	"strconv"
	"strings"

	"github.com/milvus-io/milvus/api/commonpb"
	"github.com/milvus-io/milvus/internal/util/funcutil"
)

const (
	// VariablePrefix marks identifiers treated as variables in autoindex
	// expressions, e.g. __input and __output.
	VariablePrefix = "__"
)

// identVisitor walks an expression AST and records the single
// VariablePrefix-ed identifier it references; the walk becomes invalid as
// soon as a second, different variable is seen.
type identVisitor struct {
	input string
	valid bool
}

func (v *identVisitor) Visit(n ast.Node) ast.Visitor {
	if n == nil {
		return nil
	}
	switch d := n.(type) {
	case *ast.Ident:
		if strings.HasPrefix(d.Name, VariablePrefix) {
			if v.valid {
				if v.input != d.Name {
					v.valid = false
					return nil
				}
			} else {
				v.input = d.Name
				v.valid = true
			}
		}
	}
	return v
}

// parseIdentFromExpr parses expression and returns the single
// VariablePrefix-ed variable it uses; it fails if the expression does not
// reference exactly one such variable.
func parseIdentFromExpr(expression string) (string, error) {
	expr, err := parser.ParseExpr(expression)
	if err != nil {
		return "", fmt.Errorf("parse input from expression failed: %v", err)
	}
	var x identVisitor
	ast.Walk(&x, expr)
	if !x.valid {
		return "", fmt.Errorf("parse input from expression failed: number of input variables should be 1")
	}
	return x.input, nil
}

// parseAssignment splits a statement of the form "__output = <expr>" into the
// output variable, the single input variable used by <expr>, and <expr>
// itself. All results are reset to empty strings on error.
func parseAssignment(stmt string) (input string, output string, expr string, err error) {
	defer func() {
		if err != nil {
			input = ""
			output = ""
			expr = ""
		}
	}()
	exprs := strings.Split(stmt, "=")
	if len(exprs) != 2 {
		err = fmt.Errorf("parse assignment stmt failed, wrong format")
		return
	}
	output, err = parseIdentFromExpr(exprs[0])
	if err != nil {
		err = fmt.Errorf("parse assignment stmt failed, wrong lvalue format: %w", err)
		return
	}
	expr = exprs[1]
	input, err = parseIdentFromExpr(expr)
	if err != nil {
		err = fmt.Errorf("parse assignment stmt failed, wrong rvalue format: %w", err)
		return
	}
	return
}

// getInt64FromParams looks up key in the repeated key-value params and parses
// its value as an int64.
func getInt64FromParams(params []*commonpb.KeyValuePair, key string) (int64, error) {
	valueStr, err := funcutil.GetAttrByKeyFromRepeatedKV(key, params)
	if err != nil {
		return 0, fmt.Errorf("%s not found in search_params", key)
	}
	value, err := strconv.ParseInt(valueStr, 0, 64)
	if err != nil {
		return 0, fmt.Errorf("%s [%s] is invalid", key, valueStr)
	}
	return value, nil
}
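A quick illustration of what parseAssignment yields for a well-formed statement may help when reading the tests that follow. This in-package sketch is not part of the commit; the expected expr value, including its leading space (the statement is split on "="), is inferred from the implementation above.

package autoindex

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestParseAssignment_sketch is illustrative only: it spells out the three
// values parseAssignment returns for a well-formed statement.
func TestParseAssignment_sketch(t *testing.T) {
	input, output, expr, err := parseAssignment("__output = 3*__input + 4")
	assert.NoError(t, err)
	assert.Equal(t, "__output", output)
	assert.Equal(t, "__input", input)
	// expr is everything after the '=', leading space included.
	assert.Equal(t, " 3*__input + 4", expr)
}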
106
internal/util/autoindex/util_test.go
Normal file
106
internal/util/autoindex/util_test.go
Normal file
@ -0,0 +1,106 @@
package autoindex

import (
	"testing"

	"github.com/milvus-io/milvus/api/commonpb"
	"github.com/sandertv/go-formula/v2"
	"github.com/stretchr/testify/assert"
)

func TestAutoIndexUtil_getInt64FromParams(t *testing.T) {
	var params []*commonpb.KeyValuePair
	params = append(params, &commonpb.KeyValuePair{
		Key:   TopKKey,
		Value: "10",
	})

	t.Run("getInt64FromParams1", func(t *testing.T) {
		topK, err := getInt64FromParams(params, TopKKey)
		assert.NoError(t, err)
		assert.Equal(t, int64(10), topK)
	})

	t.Run("getInt64FromParams2", func(t *testing.T) {
		topK, err := getInt64FromParams(params, "")
		assert.Error(t, err)
		assert.Equal(t, int64(0), topK)
	})

	t.Run("getInt64FromParams3", func(t *testing.T) {
		var params []*commonpb.KeyValuePair
		params = append(params, &commonpb.KeyValuePair{
			Key:   TopKKey,
			Value: "x",
		})

		topK, err := getInt64FromParams(params, TopKKey)
		assert.Error(t, err)
		assert.Equal(t, int64(0), topK)
	})
}

func TestAutoIndexUtil_parseAssignment(t *testing.T) {
	t.Run("stmt1", func(t *testing.T) {
		stmt := "__output = 1 *pow(__input * 1.1, 4) + __input * 2 + 2"
		input, output, expr, err := parseAssignment(stmt)
		assert.NoError(t, err)
		assert.Equal(t, "__output", output)
		assert.Equal(t, "__input", input)
		f, err := formula.New(expr)
		assert.NotNil(t, f)
		assert.NoError(t, err)
	})

	t.Run("stmt2", func(t *testing.T) {
		stmt := "__output 1 *pox(__input * 1.1, 4) + __input * 2 + 2"
		input, output, expr, err := parseAssignment(stmt)
		assert.Error(t, err)
		assert.Equal(t, "", output)
		assert.Equal(t, "", input)
		assert.Equal(t, "", expr)
		f, err := formula.New(expr)
		assert.Nil(t, f)
		assert.Error(t, err)
	})

	t.Run("stmt3", func(t *testing.T) {
		stmt := "_output = 1 *pox(__input * 1.1, 4) + __input * 2 + 2"
		input, output, expr, err := parseAssignment(stmt)
		assert.Error(t, err)
		assert.Equal(t, "", output)
		assert.Equal(t, "", input)
		assert.Equal(t, "", expr)
		f, err := formula.New(expr)
		assert.Nil(t, f)
		assert.Error(t, err)
	})

	t.Run("stmt4", func(t *testing.T) {
		stmt := "__output = 1 *pox(_topk * 1.1, 4) + __input * 2 + 2"
		input, output, expr, err := parseAssignment(stmt)
		assert.NoError(t, err)
		assert.Equal(t, "__output", output)
		assert.Equal(t, "__input", input)
		f, err := formula.New(expr)
		assert.NotNil(t, f)
		assert.NoError(t, err)
		inputVar := formula.Var(input, 1)
		_, err = f.Eval(inputVar)
		assert.Error(t, err)
	})

	t.Run("stmt5", func(t *testing.T) {
		stmt := "__output = 1 *pox(_topK * 1.1, 4) + _topK1 * 2 + 2"
		input, output, expr, err := parseAssignment(stmt)
		assert.Error(t, err)
		assert.Equal(t, "", output)
		assert.Equal(t, "", input)
		assert.Equal(t, "", expr)
		f, err := formula.New(expr)
		assert.Nil(t, f)
		assert.Error(t, err)
	})
}
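The parsed right-hand expressions are evaluated through the go-formula dependency added in go.mod. Below is a minimal standalone sketch of that flow, using only the calls exercised in the tests above (formula.New, formula.Var, Eval); the exact value type returned by Eval is an assumption here, not confirmed by this diff.

package main

import (
	"fmt"

	"github.com/sandertv/go-formula/v2"
)

func main() {
	// Build a formula from an expression string, as the tests above do.
	f, err := formula.New("3*__input + 4")
	if err != nil {
		panic(err)
	}
	// Bind the single input variable and evaluate; with __input = 10 the
	// result should be 34 (assumed numeric return value).
	value, err := f.Eval(formula.Var("__input", 10))
	if err != nil {
		panic(err)
	}
	fmt.Println(value)
}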
@ -110,7 +110,7 @@ func ParseIndexParamsMap(mStr string) (map[string]string, error) {
	buffer := make(map[string]interface{})
	err := json.Unmarshal([]byte(mStr), &buffer)
	if err != nil {
		return nil, errors.New("Unmarshal params failed")
		return nil, errors.New("unmarshal params failed")
	}
	ret := make(map[string]string)
	for key, value := range buffer {
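For readers of the autoindex config code below: ParseIndexParamsMap flattens a JSON object into a map[string]string, so numeric values such as 48 come back as the string "48" (this is what the autoindex_param_test.go assertions later in this diff rely on). A small standalone sketch; the signature comes from the hunk above, the stringified-number behavior is inferred from those tests.

package main

import (
	"fmt"

	"github.com/milvus-io/milvus/internal/util/funcutil"
)

func main() {
	// Parse a JSON build-params string into flat string key/value pairs.
	m, err := funcutil.ParseIndexParamsMap(`{"index_type": "HNSW", "M": 48, "efConstruction": 500}`)
	if err != nil {
		panic(err)
	}
	fmt.Println(m["index_type"], m["M"], m["efConstruction"]) // HNSW 48 500
}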
@ -17,11 +17,12 @@ import (

	"github.com/milvus-io/milvus/api/commonpb"
	"github.com/milvus-io/milvus/api/milvuspb"
	"github.com/milvus-io/milvus/internal/common"
)

const (
	// MetricTypeKey is the key of metric type in GetMetrics request.
	MetricTypeKey = "metric_type"
	MetricTypeKey = common.MetricTypeKey

	// SystemInfoMetrics means users request for system information metrics.
	SystemInfoMetrics = "system_info"
117
internal/util/paramtable/autoindex_param.go
Normal file
117
internal/util/paramtable/autoindex_param.go
Normal file
@ -0,0 +1,117 @@
// Licensed to the LF AI & Data foundation under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package paramtable

import (
	"fmt"
	"strconv"

	"github.com/milvus-io/milvus/internal/common"
	"github.com/milvus-io/milvus/internal/util/autoindex"
	"github.com/milvus-io/milvus/internal/util/funcutil"
)

///////////////////////////////////////////////////////////////////////////////
// --- common ---
type autoIndexConfig struct {
	Base *BaseTable

	Enable bool

	indexParamsStr string
	IndexParams    map[string]string

	extraParamsStr     string
	BigDataExtraParams *autoindex.BigDataIndexExtraParams

	SearchParamsYamlStr string

	IndexType         string
	AutoIndexTypeName string
	Parser            *autoindex.Parser
}

func (p *autoIndexConfig) init(base *BaseTable) {
	p.Base = base
	p.initEnable() // must be called first
	p.initParams()
}

func (p *autoIndexConfig) initEnable() {
	var err error
	enable := p.Base.LoadWithDefault("autoIndex.enable", "false")
	p.Enable, err = strconv.ParseBool(enable)
	if err != nil {
		panic(err)
	}
}

func (p *autoIndexConfig) initParams() {
	if !p.Enable {
		// init a default ExtraParams
		p.BigDataExtraParams = autoindex.NewBigDataIndexExtraParams()
		return
	}
	p.indexParamsStr = p.Base.LoadWithDefault("autoIndex.params.build", "")
	p.parseBuildParams(p.indexParamsStr)

	p.SearchParamsYamlStr = p.Base.LoadWithDefault("autoIndex.params.search", "")
	p.parseSearchParams(p.SearchParamsYamlStr)
	p.AutoIndexTypeName = p.Base.LoadWithDefault("autoIndex.type", "")
	p.extraParamsStr = p.Base.LoadWithDefault("autoIndex.params.extra", "")
	p.parseExtraParams(p.extraParamsStr)
}

func (p *autoIndexConfig) parseBuildParams(paramsStr string) {
	var err error
	p.IndexParams, err = funcutil.ParseIndexParamsMap(paramsStr)
	if err != nil {
		err2 := fmt.Errorf("parse autoindex build params failed: %w", err)
		panic(err2)
	}
	var ok bool
	p.IndexType, ok = p.IndexParams[common.IndexTypeKey]
	if !ok {
		err2 := fmt.Errorf("parse autoindex build params failed: %s not found", common.IndexTypeKey)
		panic(err2)
	}
}

func (p *autoIndexConfig) parseExtraParams(paramsStr string) {
	p.BigDataExtraParams = autoindex.NewBigDataExtraParamsFromJSON(paramsStr)
}

func (p *autoIndexConfig) parseSearchParams(paramsStr string) {
	p.Parser = autoindex.NewParser()
	err := p.Parser.InitFromJSONStr(paramsStr)
	if err != nil {
		err2 := fmt.Errorf("parse autoindex search params failed: %w", err)
		panic(err2)
	}
}

// GetSearchParamStrCalculator returns a method that can calculate search
// params for the given level, or nil when autoindex is disabled or the level
// is unknown.
func (p *autoIndexConfig) GetSearchParamStrCalculator(level int) autoindex.Calculator {
	if !p.Enable {
		return nil
	}
	m, ok := p.Parser.GetMethodByLevel(level)
	if !ok {
		return nil
	}
	return m
}
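An illustrative sketch of how a caller might drive the calculator returned by GetSearchParamStrCalculator. It is not part of the commit, and it assumes Calculator exposes the Calculate method exercised in the autoindex parser tests earlier in this diff; the helper name is hypothetical.

package paramtable

import (
	"github.com/milvus-io/milvus/api/commonpb"
)

// searchParamsForLevel resolves the calculator for a level and runs it over
// the user-supplied key/value params. Sketch only.
func searchParamsForLevel(cfg *autoIndexConfig, level int, params []*commonpb.KeyValuePair) (map[string]interface{}, error) {
	calc := cfg.GetSearchParamStrCalculator(level)
	if calc == nil {
		// Autoindex is disabled or the level has no configured method.
		return nil, nil
	}
	return calc.Calculate(params)
}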
146
internal/util/paramtable/autoindex_param_test.go
Normal file
146
internal/util/paramtable/autoindex_param_test.go
Normal file
@ -0,0 +1,146 @@
// Licensed to the LF AI & Data foundation under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package paramtable

import (
	"encoding/json"
	"strconv"
	"testing"

	"github.com/milvus-io/milvus/internal/common"
	"github.com/milvus-io/milvus/internal/util/autoindex"
	"github.com/stretchr/testify/assert"
)

const (
	MetricTypeKey = common.MetricTypeKey
	IndexTypeKey  = common.IndexTypeKey
)

func TestAutoIndexParams_build(t *testing.T) {
	var CParams ComponentParam
	CParams.Init()

	t.Run("test parseBuildParams success", func(t *testing.T) {
		var err error
		map1 := map[string]interface{}{
			IndexTypeKey:     "HNSW",
			"M":              48,
			"efConstruction": 500,
		}
		var jsonStrBytes []byte
		jsonStrBytes, err = json.Marshal(map1)
		assert.NoError(t, err)
		CParams.AutoIndexConfig.parseBuildParams(string(jsonStrBytes))
		assert.Equal(t, "HNSW", CParams.AutoIndexConfig.IndexType)
		assert.Equal(t, strconv.Itoa(map1["M"].(int)), CParams.AutoIndexConfig.IndexParams["M"])
		assert.Equal(t, strconv.Itoa(map1["efConstruction"].(int)), CParams.AutoIndexConfig.IndexParams["efConstruction"])

		map2 := map[string]interface{}{
			IndexTypeKey: "IVF_FLAT",
			"nlist":      1024,
		}
		jsonStrBytes, err = json.Marshal(map2)
		assert.NoError(t, err)
		CParams.AutoIndexConfig.parseBuildParams(string(jsonStrBytes))
		assert.Equal(t, "IVF_FLAT", CParams.AutoIndexConfig.IndexType)
		assert.Equal(t, strconv.Itoa(map2["nlist"].(int)), CParams.AutoIndexConfig.IndexParams["nlist"])
	})

	t.Run("test parseBuildParams miss total", func(t *testing.T) {
		defer func() {
			if r := recover(); r == nil {
				t.Errorf("The code did not panic")
			}
		}()
		CParams.AutoIndexConfig.parseBuildParams("")
	})

	t.Run("test parseBuildParams miss index_type", func(t *testing.T) {
		defer func() {
			if r := recover(); r == nil {
				t.Errorf("The code did not panic")
			}
		}()
		var err error
		map1 := map[string]interface{}{
			"M":              48,
			"efConstruction": 500,
		}
		var jsonStrBytes []byte
		jsonStrBytes, err = json.Marshal(map1)
		assert.NoError(t, err)
		CParams.AutoIndexConfig.parseBuildParams(string(jsonStrBytes))
	})
}

func TestAutoIndexParams_search1(t *testing.T) {
	var CParams ComponentParam
	CParams.Init()
	CParams.AutoIndexConfig.Enable = true

	var err error
	indexMap := map[string]interface{}{
		IndexTypeKey:     "HNSW",
		"M":              48,
		"efConstruction": 500,
	}
	var jsonStrBytes []byte
	jsonStrBytes, err = json.Marshal(indexMap)
	assert.NoError(t, err)
	CParams.AutoIndexConfig.parseBuildParams(string(jsonStrBytes))

	jsonStr := `
		{
			"1": {
				"function": "__output = 3*__input + 4"
			},
			"2": {
				"bp": [10, 200],
				"functions": [
					"__output = __input + 4",
					"__output = 3*__input + 4",
					"__output = pow(__input, 2) + 4"
				]
			},
			"3": {
				"bp": [10, 300],
				"functions": [
					"__output = __input + 4",
					"__output = 2*__input + 3",
					"__output = pow(__input, 1.2) + 4"
				]
			}
		}`

	CParams.AutoIndexConfig.Parser = autoindex.NewParser()
	err = CParams.AutoIndexConfig.Parser.InitFromJSONStr(jsonStr)
	assert.NoError(t, err)

	normalLevels := []int{1, 2, 3}
	for _, l := range normalLevels {
		m := CParams.AutoIndexConfig.GetSearchParamStrCalculator(l)
		assert.NotNil(t, m)
	}
	invalidLevels := []int{-1, 0, 4}
	for _, l := range invalidLevels {
		m := CParams.AutoIndexConfig.GetSearchParamStrCalculator(l)
		assert.Nil(t, m)
	}
}
@ -44,8 +44,9 @@ type ComponentParam struct {
	ServiceParam
	once sync.Once

	CommonCfg   commonConfig
	QuotaConfig quotaConfig
	CommonCfg       commonConfig
	QuotaConfig     quotaConfig
	AutoIndexConfig autoIndexConfig

	RootCoordCfg rootCoordConfig
	ProxyCfg     proxyConfig
@ -71,6 +72,7 @@ func (p *ComponentParam) Init() {

	p.CommonCfg.init(&p.BaseTable)
	p.QuotaConfig.init(&p.BaseTable)
	p.AutoIndexConfig.init(&p.BaseTable)

	p.RootCoordCfg.init(&p.BaseTable)
	p.ProxyCfg.init(&p.BaseTable)
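With the struct field and init call wired up as above, the new settings are reached the same way as the other config groups. A minimal in-package sketch, not part of the commit (the helper name is hypothetical, the fields are from this diff):

package paramtable

// exampleReadAutoIndexConfig only shows where the new AutoIndexConfig values
// live after Init; it is illustrative, not production code.
func exampleReadAutoIndexConfig() (enabled bool, indexType string) {
	var params ComponentParam
	params.Init()
	return params.AutoIndexConfig.Enable, params.AutoIndexConfig.IndexType
}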
@ -111,6 +111,7 @@ go test -race -cover ${APPLE_SILICON_FLAG} "${MILVUS_DIR}/config/..." -failfast

function test_util()
{
    go test -race -cover ${APPLE_SILICON_FLAG} "${MILVUS_DIR}/util/autoindex/..." -failfast
    go test -race -cover ${APPLE_SILICON_FLAG} "${MILVUS_DIR}/util/funcutil/..." -failfast
    go test -race -cover ${APPLE_SILICON_FLAG} "${MILVUS_DIR}/util/paramtable/..." -failfast
    go test -race -cover ${APPLE_SILICON_FLAG} "${MILVUS_DIR}/util/retry/..." -failfast

@ -110,6 +110,8 @@ CREATE TABLE if not exists milvus_meta.`indexes` (
    index_id BIGINT NOT NULL,
    index_name VARCHAR(256),
    index_params VARCHAR(2048),
    user_index_params VARCHAR(2048),
    is_auto_index BOOL DEFAULT FALSE,
    create_time bigint unsigned,
    is_deleted BOOL DEFAULT FALSE,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
@ -9,7 +9,7 @@ allure-pytest==2.7.0
pytest-print==0.2.1
pytest-level==0.1.1
pytest-xdist==2.5.0
pymilvus==2.2.0.dev33
pymilvus==2.2.0.dev40
pytest-rerunfailures==9.1.1
git+https://github.com/Projectplace/pytest-tags
ndg-httpsclient
@ -35,4 +35,4 @@ protobuf==3.17.1
minio==7.1.5

# for benchmark
h5py==3.1.0
h5py==3.1.0
@ -542,7 +542,7 @@ class TestConnectionOperation(TestcaseBase):
                                     check_items={ct.dict_content: {"address": f"{host}:{port}",
                                                                    "user": ""}})

    @pytest.mark.tags(ct.CaseLabel.L1)
    @pytest.mark.tags(ct.CaseLabel.L2)
    @pytest.mark.parametrize("connect_name", [DefaultConfig.DEFAULT_USING, "test_alias_nme"])
    def test_connection_connect_wrong_params(self, host, port, connect_name):
        """