Support TTL for collection level (#19383)

Signed-off-by: yun.zhang <yun.zhang@zilliz.com>

Signed-off-by: yun.zhang <yun.zhang@zilliz.com>
This commit is contained in:
jaime 2022-10-10 20:31:22 +08:00 committed by GitHub
parent 1c0e1ccfbf
commit 31db01b6ae
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
69 changed files with 3181 additions and 1515 deletions

View File

@ -323,6 +323,7 @@ const (
MsgType_CreateAlias MsgType = 108 MsgType_CreateAlias MsgType = 108
MsgType_DropAlias MsgType = 109 MsgType_DropAlias MsgType = 109
MsgType_AlterAlias MsgType = 110 MsgType_AlterAlias MsgType = 110
MsgType_AlterCollection MsgType = 111
// DEFINITION REQUESTS: PARTITION // DEFINITION REQUESTS: PARTITION
MsgType_CreatePartition MsgType = 200 MsgType_CreatePartition MsgType = 200
MsgType_DropPartition MsgType = 201 MsgType_DropPartition MsgType = 201
@ -415,6 +416,7 @@ var MsgType_name = map[int32]string{
108: "CreateAlias", 108: "CreateAlias",
109: "DropAlias", 109: "DropAlias",
110: "AlterAlias", 110: "AlterAlias",
111: "AlterCollection",
200: "CreatePartition", 200: "CreatePartition",
201: "DropPartition", 201: "DropPartition",
202: "HasPartition", 202: "HasPartition",
@ -498,6 +500,7 @@ var MsgType_value = map[string]int32{
"CreateAlias": 108, "CreateAlias": 108,
"DropAlias": 109, "DropAlias": 109,
"AlterAlias": 110, "AlterAlias": 110,
"AlterCollection": 111,
"CreatePartition": 200, "CreatePartition": 200,
"DropPartition": 201, "DropPartition": 201,
"HasPartition": 202, "HasPartition": 202,
@ -1436,168 +1439,168 @@ func init() {
func init() { proto.RegisterFile("common.proto", fileDescriptor_555bd8c177793206) } func init() { proto.RegisterFile("common.proto", fileDescriptor_555bd8c177793206) }
var fileDescriptor_555bd8c177793206 = []byte{ var fileDescriptor_555bd8c177793206 = []byte{
// 2597 bytes of a gzipped FileDescriptorProto // 2603 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x58, 0xd9, 0x73, 0x63, 0x47, 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x58, 0xd9, 0x73, 0x63, 0x47,
0xd5, 0x9f, 0x2b, 0xc9, 0xf6, 0xa8, 0xa5, 0xb1, 0x8f, 0xdb, 0x1e, 0x8f, 0x32, 0x4b, 0xc6, 0xf1, 0xd5, 0x9f, 0x2b, 0xc9, 0xf6, 0xa8, 0x25, 0xdb, 0x67, 0xda, 0x1e, 0x8f, 0x32, 0x4b, 0xc6, 0xf1,
0x97, 0x7c, 0x18, 0x91, 0x78, 0x92, 0x49, 0x15, 0x50, 0x54, 0x85, 0xc2, 0x96, 0x6c, 0x8f, 0x2a, 0x97, 0x7c, 0x18, 0x91, 0x78, 0x92, 0x49, 0x15, 0x50, 0x54, 0x85, 0xc2, 0x96, 0x6c, 0x8f, 0x2a,
0xde, 0x90, 0x3d, 0x81, 0xa2, 0x0a, 0xa6, 0x5a, 0xf7, 0x1e, 0xcb, 0x3d, 0x73, 0x75, 0xfb, 0x72, 0xde, 0x90, 0x3d, 0x81, 0xa2, 0x0a, 0xa6, 0x5a, 0xf7, 0x1e, 0xcb, 0x3d, 0x73, 0x75, 0xfb, 0x72,
0xbb, 0xe5, 0xb1, 0xf2, 0x14, 0x02, 0xe4, 0x85, 0x17, 0x96, 0x2a, 0x9e, 0xf9, 0x03, 0x20, 0xc5, 0xbb, 0xe5, 0xb1, 0xf2, 0x14, 0x02, 0xe4, 0x85, 0xa2, 0x8a, 0xa5, 0x8a, 0x67, 0xfe, 0x00, 0x48,
0x0e, 0x8f, 0xec, 0x24, 0x6c, 0xcf, 0xec, 0xf0, 0x08, 0xef, 0x2c, 0x21, 0x2b, 0x75, 0xfa, 0xae, 0xb1, 0xc3, 0x23, 0x3b, 0x09, 0xdb, 0x33, 0x3b, 0x3c, 0xc2, 0x3b, 0x6b, 0x56, 0xea, 0xf4, 0x5d,
0xf2, 0x4c, 0xe0, 0x81, 0x37, 0xf5, 0xef, 0x9c, 0x3e, 0x5b, 0x9f, 0xed, 0x8a, 0xd5, 0x5d, 0x35, 0xe5, 0x99, 0xc0, 0x03, 0x6f, 0xea, 0xdf, 0x39, 0x7d, 0xb6, 0x3e, 0xdb, 0x15, 0xab, 0xbb, 0x6a,
0x18, 0xa8, 0x60, 0x25, 0x8c, 0x94, 0x51, 0x7c, 0x6e, 0x20, 0xfd, 0xe3, 0xa1, 0x8e, 0x4f, 0x2b, 0x30, 0x50, 0xc1, 0x4a, 0x18, 0x29, 0xa3, 0xf8, 0xdc, 0x40, 0xfa, 0xc7, 0x43, 0x1d, 0x9f, 0x56,
0x31, 0xe9, 0xe2, 0x62, 0x5f, 0xa9, 0xbe, 0x8f, 0xd7, 0x2c, 0xd8, 0x1b, 0x1e, 0x5e, 0xf3, 0x50, 0x62, 0xd2, 0xc5, 0xc5, 0xbe, 0x52, 0x7d, 0x1f, 0xaf, 0x59, 0xb0, 0x37, 0x3c, 0xbc, 0xe6, 0xa1,
0xbb, 0x91, 0x0c, 0x8d, 0x8a, 0x62, 0xc6, 0xa5, 0x5b, 0x6c, 0x72, 0xdf, 0x08, 0x33, 0xd4, 0xfc, 0x76, 0x23, 0x19, 0x1a, 0x15, 0xc5, 0x8c, 0x4b, 0xb7, 0xd8, 0xe4, 0xbe, 0x11, 0x66, 0xa8, 0xf9,
0x29, 0xc6, 0x30, 0x8a, 0x54, 0x74, 0xcb, 0x55, 0x1e, 0x36, 0x9c, 0x45, 0x67, 0x79, 0xfa, 0xfa, 0x53, 0x8c, 0x61, 0x14, 0xa9, 0xe8, 0x96, 0xab, 0x3c, 0x6c, 0x38, 0x8b, 0xce, 0xf2, 0xcc, 0xf5,
0x83, 0x2b, 0xf7, 0x91, 0xba, 0xb2, 0x4e, 0x6c, 0x2d, 0xe5, 0x61, 0xb7, 0x8a, 0xe9, 0x4f, 0xbe, 0x07, 0x57, 0xee, 0x23, 0x75, 0x65, 0x9d, 0xd8, 0x5a, 0xca, 0xc3, 0x6e, 0x15, 0xd3, 0x9f, 0x7c,
0xc0, 0x26, 0x23, 0x14, 0x5a, 0x05, 0x8d, 0xd2, 0xa2, 0xb3, 0x5c, 0xed, 0x26, 0xa7, 0xa5, 0x77, 0x81, 0x4d, 0x46, 0x28, 0xb4, 0x0a, 0x1a, 0xa5, 0x45, 0x67, 0xb9, 0xda, 0x4d, 0x4e, 0x4b, 0xef,
0xb3, 0xfa, 0xd3, 0x38, 0x7a, 0x46, 0xf8, 0x43, 0xdc, 0x13, 0x32, 0xe2, 0xc0, 0xca, 0x77, 0x70, 0x64, 0xf5, 0xa7, 0x71, 0xf4, 0x8c, 0xf0, 0x87, 0xb8, 0x27, 0x64, 0xc4, 0x81, 0x95, 0xef, 0xe0,
0x64, 0xe5, 0x57, 0xbb, 0xf4, 0x93, 0xcf, 0xb3, 0x89, 0x63, 0x22, 0x27, 0x17, 0xe3, 0xc3, 0xd2, 0xc8, 0xca, 0xaf, 0x76, 0xe9, 0x27, 0x9f, 0x67, 0x13, 0xc7, 0x44, 0x4e, 0x2e, 0xc6, 0x87, 0xa5,
0x93, 0xac, 0xf6, 0x34, 0x8e, 0xda, 0xc2, 0x88, 0xb7, 0xb9, 0xc6, 0x59, 0xc5, 0x13, 0x46, 0xd8, 0x27, 0x59, 0xed, 0x69, 0x1c, 0xb5, 0x85, 0x11, 0x6f, 0x71, 0x8d, 0xb3, 0x8a, 0x27, 0x8c, 0xb0,
0x5b, 0xf5, 0xae, 0xfd, 0xbd, 0x74, 0x99, 0x55, 0xd6, 0x7c, 0xd5, 0xcb, 0x45, 0x3a, 0x96, 0x98, 0xb7, 0xea, 0x5d, 0xfb, 0x7b, 0xe9, 0x32, 0xab, 0xac, 0xf9, 0xaa, 0x97, 0x8b, 0x74, 0x2c, 0x31,
0x88, 0x3c, 0x66, 0xb0, 0xe7, 0x0b, 0x17, 0x8f, 0x94, 0xef, 0x61, 0x64, 0x4d, 0x22, 0xb9, 0x46, 0x11, 0x79, 0xcc, 0x60, 0xcf, 0x17, 0x2e, 0x1e, 0x29, 0xdf, 0xc3, 0xc8, 0x9a, 0x44, 0x72, 0x8d,
0xf4, 0x53, 0xb9, 0x46, 0xf4, 0xf9, 0x7b, 0x59, 0xc5, 0x8c, 0xc2, 0xd8, 0x9a, 0xe9, 0xeb, 0x0f, 0xe8, 0xa7, 0x72, 0x8d, 0xe8, 0xf3, 0x77, 0xb3, 0x8a, 0x19, 0x85, 0xb1, 0x35, 0x33, 0xd7, 0x1f,
0xdf, 0x37, 0x02, 0x05, 0x31, 0x07, 0xa3, 0x10, 0xbb, 0xf6, 0x06, 0x85, 0xc0, 0x2a, 0xd2, 0x8d, 0xbe, 0x6f, 0x04, 0x0a, 0x62, 0x0e, 0x46, 0x21, 0x76, 0xed, 0x0d, 0x0a, 0x81, 0x55, 0xa4, 0x1b,
0xf2, 0x62, 0x79, 0xb9, 0xde, 0x4d, 0x4e, 0x4b, 0x1f, 0x1d, 0xd3, 0xbb, 0x19, 0xa9, 0x61, 0xc8, 0xe5, 0xc5, 0xf2, 0x72, 0xbd, 0x9b, 0x9c, 0x96, 0x3e, 0x3c, 0xa6, 0x77, 0x33, 0x52, 0xc3, 0x90,
0x3b, 0xac, 0x1e, 0xe6, 0x98, 0x6e, 0x38, 0x8b, 0xe5, 0xe5, 0xda, 0xf5, 0x47, 0xfe, 0x9b, 0x36, 0x77, 0x58, 0x3d, 0xcc, 0x31, 0xdd, 0x70, 0x16, 0xcb, 0xcb, 0xb5, 0xeb, 0x8f, 0xfc, 0x37, 0x6d,
0x6b, 0x74, 0x77, 0xec, 0xea, 0xd2, 0x63, 0x6c, 0x6a, 0xd5, 0xf3, 0x22, 0xd4, 0x9a, 0x4f, 0xb3, 0xd6, 0xe8, 0xee, 0xd8, 0xd5, 0xa5, 0xc7, 0xd8, 0xd4, 0xaa, 0xe7, 0x45, 0xa8, 0x35, 0x9f, 0x61,
0x92, 0x0c, 0x13, 0x67, 0x4a, 0x32, 0xa4, 0x18, 0x85, 0x2a, 0x32, 0xd6, 0x97, 0x72, 0xd7, 0xfe, 0x25, 0x19, 0x26, 0xce, 0x94, 0x64, 0x48, 0x31, 0x0a, 0x55, 0x64, 0xac, 0x2f, 0xe5, 0xae, 0xfd,
0x5e, 0x7a, 0xd1, 0x61, 0x53, 0xdb, 0xba, 0xbf, 0x26, 0x34, 0xf2, 0xf7, 0xb0, 0xb3, 0x03, 0xdd, 0xbd, 0xf4, 0xa2, 0xc3, 0xa6, 0xb6, 0x75, 0x7f, 0x4d, 0x68, 0xe4, 0xef, 0x62, 0x67, 0x07, 0xba,
0xbf, 0x65, 0xfd, 0x8d, 0x5f, 0xfc, 0xf2, 0x7d, 0x2d, 0xd8, 0xd6, 0x7d, 0xeb, 0xe7, 0xd4, 0x20, 0x7f, 0xcb, 0xfa, 0x1b, 0xbf, 0xf8, 0xe5, 0xfb, 0x5a, 0xb0, 0xad, 0xfb, 0xd6, 0xcf, 0xa9, 0x41,
0xfe, 0x41, 0x01, 0x1e, 0xe8, 0x7e, 0xa7, 0x9d, 0x48, 0x8e, 0x0f, 0xfc, 0x32, 0xab, 0x1a, 0x39, 0xfc, 0x83, 0x02, 0x3c, 0xd0, 0xfd, 0x4e, 0x3b, 0x91, 0x1c, 0x1f, 0xf8, 0x65, 0x56, 0x35, 0x72,
0x40, 0x6d, 0xc4, 0x20, 0x6c, 0x94, 0x17, 0x9d, 0xe5, 0x4a, 0x37, 0x07, 0xf8, 0x45, 0x76, 0x56, 0x80, 0xda, 0x88, 0x41, 0xd8, 0x28, 0x2f, 0x3a, 0xcb, 0x95, 0x6e, 0x0e, 0xf0, 0x8b, 0xec, 0xac,
0xab, 0x61, 0xe4, 0x62, 0xa7, 0xdd, 0xa8, 0xd8, 0x6b, 0xd9, 0x99, 0x68, 0x46, 0x44, 0x7d, 0x34, 0x56, 0xc3, 0xc8, 0xc5, 0x4e, 0xbb, 0x51, 0xb1, 0xd7, 0xb2, 0x33, 0xd1, 0x8c, 0x88, 0xfa, 0x68,
0x9d, 0x76, 0x63, 0x22, 0xa6, 0xa5, 0xe7, 0xa5, 0xa7, 0x58, 0x75, 0x5b, 0xf7, 0x6f, 0xa0, 0xf0, 0x3a, 0xed, 0xc6, 0x44, 0x4c, 0x4b, 0xcf, 0x4b, 0x4f, 0xb1, 0xea, 0xb6, 0xee, 0xdf, 0x40, 0xe1,
0x30, 0xe2, 0x8f, 0xb3, 0x4a, 0x4f, 0xe8, 0xd8, 0xda, 0xda, 0xdb, 0x5b, 0x4b, 0xde, 0x75, 0x2d, 0x61, 0xc4, 0x1f, 0x67, 0x95, 0x9e, 0xd0, 0xb1, 0xb5, 0xb5, 0xb7, 0xb6, 0x96, 0xbc, 0xeb, 0x5a,
0xe7, 0xd2, 0xc7, 0x58, 0xbd, 0xbd, 0xbd, 0xf5, 0x3f, 0x48, 0x20, 0xb7, 0xf4, 0x91, 0x88, 0xbc, 0xce, 0xa5, 0x8f, 0xb0, 0x7a, 0x7b, 0x7b, 0xeb, 0x7f, 0x90, 0x40, 0x6e, 0xe9, 0x23, 0x11, 0x79,
0x1d, 0x31, 0x48, 0x93, 0x34, 0x07, 0x96, 0x5e, 0x73, 0x58, 0x7d, 0x2f, 0x92, 0xc7, 0xd2, 0xc7, 0x3b, 0x62, 0x90, 0x26, 0x69, 0x0e, 0x2c, 0xbd, 0xea, 0xb0, 0xfa, 0x5e, 0x24, 0x8f, 0xa5, 0x8f,
0x3e, 0xae, 0x9f, 0x18, 0xfe, 0x01, 0x56, 0x53, 0xbd, 0xdb, 0xe8, 0x9a, 0x62, 0x5c, 0xaf, 0xde, 0x7d, 0x5c, 0x3f, 0x31, 0xfc, 0x7d, 0xac, 0xa6, 0x7a, 0xb7, 0xd1, 0x35, 0xc5, 0xb8, 0x5e, 0xbd,
0x57, 0xcf, 0xae, 0xe5, 0xb3, 0xa1, 0x65, 0x2a, 0xfb, 0xcd, 0x77, 0x19, 0x24, 0x12, 0xc2, 0x54, 0xaf, 0x9e, 0x5d, 0xcb, 0x67, 0x43, 0xcb, 0x54, 0xf6, 0x9b, 0xef, 0x32, 0x48, 0x24, 0x84, 0xa9,
0xf0, 0x7f, 0x4c, 0xc7, 0x58, 0x4c, 0x66, 0x44, 0x77, 0x46, 0x8d, 0x03, 0xbc, 0xc9, 0x66, 0x13, 0xe0, 0xff, 0x98, 0x8e, 0xb1, 0x98, 0xcc, 0x88, 0xee, 0xac, 0x1a, 0x07, 0x78, 0x93, 0x9d, 0x4b,
0x81, 0x81, 0x18, 0xe0, 0x2d, 0x19, 0x78, 0x78, 0x62, 0x1f, 0x68, 0x22, 0xe5, 0x25, 0x57, 0x3a, 0x04, 0x06, 0x62, 0x80, 0xb7, 0x64, 0xe0, 0xe1, 0x89, 0x7d, 0xa0, 0x89, 0x94, 0x97, 0x5c, 0xe9,
0x04, 0xf3, 0x47, 0x19, 0xbf, 0x87, 0x57, 0xdb, 0x07, 0x9b, 0xe8, 0xc2, 0x29, 0x66, 0xdd, 0xfc, 0x10, 0xcc, 0x1f, 0x65, 0xfc, 0x1e, 0x5e, 0x6d, 0x1f, 0x6c, 0xa2, 0x0b, 0xa7, 0x98, 0x75, 0xf3,
0x62, 0x95, 0x55, 0xb3, 0x7e, 0xc0, 0x6b, 0x6c, 0x6a, 0x7f, 0xe8, 0xba, 0xa8, 0x35, 0x9c, 0xe1, 0x0b, 0x55, 0x56, 0xcd, 0xfa, 0x01, 0xaf, 0xb1, 0xa9, 0xfd, 0xa1, 0xeb, 0xa2, 0xd6, 0x70, 0x86,
0x73, 0x6c, 0xe6, 0x66, 0x80, 0x27, 0x21, 0xba, 0x06, 0x3d, 0xcb, 0x03, 0x0e, 0x9f, 0x65, 0xe7, 0xcf, 0xb1, 0xd9, 0x9b, 0x01, 0x9e, 0x84, 0xe8, 0x1a, 0xf4, 0x2c, 0x0f, 0x38, 0xfc, 0x1c, 0x9b,
0x5a, 0x2a, 0x08, 0xd0, 0x35, 0x1b, 0x42, 0xfa, 0xe8, 0x41, 0x89, 0xcf, 0x33, 0xd8, 0xc3, 0x68, 0x6e, 0xa9, 0x20, 0x40, 0xd7, 0x6c, 0x08, 0xe9, 0xa3, 0x07, 0x25, 0x3e, 0xcf, 0x60, 0x0f, 0xa3,
0x20, 0xb5, 0x96, 0x2a, 0x68, 0x63, 0x20, 0xd1, 0x83, 0x32, 0xbf, 0xc0, 0xe6, 0x5a, 0xca, 0xf7, 0x81, 0xd4, 0x5a, 0xaa, 0xa0, 0x8d, 0x81, 0x44, 0x0f, 0xca, 0xfc, 0x02, 0x9b, 0x6b, 0x29, 0xdf,
0xd1, 0x35, 0x52, 0x05, 0x3b, 0xca, 0xac, 0x9f, 0x48, 0x6d, 0x34, 0x54, 0x48, 0x6c, 0xc7, 0xf7, 0x47, 0xd7, 0x48, 0x15, 0xec, 0x28, 0xb3, 0x7e, 0x22, 0xb5, 0xd1, 0x50, 0x21, 0xb1, 0x1d, 0xdf,
0xb1, 0x2f, 0xfc, 0xd5, 0xa8, 0x3f, 0x1c, 0x60, 0x60, 0x60, 0x82, 0x64, 0x24, 0x60, 0x5b, 0x0e, 0xc7, 0xbe, 0xf0, 0x57, 0xa3, 0xfe, 0x70, 0x80, 0x81, 0x81, 0x09, 0x92, 0x91, 0x80, 0x6d, 0x39,
0x30, 0x20, 0x49, 0x30, 0x55, 0x40, 0xad, 0xb5, 0x14, 0x5b, 0x38, 0xcb, 0x1f, 0x60, 0xe7, 0x13, 0xc0, 0x80, 0x24, 0xc1, 0x54, 0x01, 0xb5, 0xd6, 0x52, 0x6c, 0xe1, 0x2c, 0x7f, 0x80, 0x9d, 0x4f,
0xb4, 0xa0, 0x40, 0x0c, 0x10, 0xaa, 0x7c, 0x86, 0xd5, 0x12, 0xd2, 0xc1, 0xee, 0xde, 0xd3, 0xc0, 0xd0, 0x82, 0x02, 0x31, 0x40, 0xa8, 0xf2, 0x59, 0x56, 0x4b, 0x48, 0x07, 0xbb, 0x7b, 0x4f, 0x03,
0x0a, 0x12, 0xba, 0xea, 0x6e, 0x17, 0x5d, 0x15, 0x79, 0x50, 0x2b, 0x98, 0xf0, 0x0c, 0xba, 0x46, 0x2b, 0x48, 0xe8, 0xaa, 0xbb, 0x5d, 0x74, 0x55, 0xe4, 0x41, 0xad, 0x60, 0xc2, 0x33, 0xe8, 0x1a,
0x45, 0x9d, 0x36, 0xd4, 0xc9, 0xe0, 0x04, 0xdc, 0x47, 0x11, 0xb9, 0x47, 0x5d, 0xd4, 0x43, 0xdf, 0x15, 0x75, 0xda, 0x50, 0x27, 0x83, 0x13, 0x70, 0x1f, 0x45, 0xe4, 0x1e, 0x75, 0x51, 0x0f, 0x7d,
0xc0, 0x39, 0x0e, 0xac, 0xbe, 0x21, 0x7d, 0xdc, 0x51, 0x66, 0x43, 0x0d, 0x03, 0x0f, 0xa6, 0xf9, 0x03, 0xd3, 0x1c, 0x58, 0x7d, 0x43, 0xfa, 0xb8, 0xa3, 0xcc, 0x86, 0x1a, 0x06, 0x1e, 0xcc, 0xf0,
0x34, 0x63, 0xdb, 0x68, 0x44, 0x12, 0x81, 0x19, 0x52, 0xdb, 0x12, 0xee, 0x11, 0x26, 0x00, 0xf0, 0x19, 0xc6, 0xb6, 0xd1, 0x88, 0x24, 0x02, 0xb3, 0xa4, 0xb6, 0x25, 0xdc, 0x23, 0x4c, 0x00, 0xe0,
0x05, 0xc6, 0x5b, 0x22, 0x08, 0x94, 0x69, 0x45, 0x28, 0x0c, 0x6e, 0xd8, 0x4a, 0x87, 0x59, 0x32, 0x0b, 0x8c, 0xb7, 0x44, 0x10, 0x28, 0xd3, 0x8a, 0x50, 0x18, 0xdc, 0xb0, 0x95, 0x0e, 0xe7, 0xc8,
0x67, 0x0c, 0x97, 0x3e, 0x02, 0xcf, 0xb9, 0xdb, 0xe8, 0x63, 0xc6, 0x3d, 0x97, 0x73, 0x27, 0x38, 0x9c, 0x31, 0x5c, 0xfa, 0x08, 0x3c, 0xe7, 0x6e, 0xa3, 0x8f, 0x19, 0xf7, 0x5c, 0xce, 0x9d, 0xe0,
0x71, 0xcf, 0x93, 0xf1, 0x6b, 0x43, 0xe9, 0x7b, 0x36, 0x24, 0xf1, 0xb3, 0x9c, 0x27, 0x1b, 0x13, 0xc4, 0x3d, 0x4f, 0xc6, 0xaf, 0x0d, 0xa5, 0xef, 0xd9, 0x90, 0xc4, 0xcf, 0x72, 0x9e, 0x6c, 0x4c,
0xe3, 0x77, 0xb6, 0x3a, 0xfb, 0x07, 0xb0, 0xc0, 0xcf, 0xb3, 0xd9, 0x04, 0xd9, 0x46, 0x13, 0x49, 0x8c, 0xdf, 0xd9, 0xea, 0xec, 0x1f, 0xc0, 0x02, 0x3f, 0xcf, 0xce, 0x25, 0xc8, 0x36, 0x9a, 0x48,
0xd7, 0x06, 0xef, 0x02, 0x99, 0xba, 0x3b, 0x34, 0xbb, 0x87, 0xdb, 0x38, 0x50, 0xd1, 0x08, 0x1a, 0xba, 0x36, 0x78, 0x17, 0xc8, 0xd4, 0xdd, 0xa1, 0xd9, 0x3d, 0xdc, 0xc6, 0x81, 0x8a, 0x46, 0xd0,
0xf4, 0xa0, 0x56, 0x52, 0xfa, 0x44, 0xf0, 0x00, 0x69, 0x58, 0x1f, 0x84, 0x66, 0x94, 0x87, 0x17, 0xa0, 0x07, 0xb5, 0x92, 0xd2, 0x27, 0x82, 0x07, 0x48, 0xc3, 0xfa, 0x20, 0x34, 0xa3, 0x3c, 0xbc,
0x2e, 0xf2, 0x4b, 0xec, 0xc2, 0xcd, 0xd0, 0x13, 0x06, 0x3b, 0x03, 0x6a, 0x43, 0x07, 0x42, 0xdf, 0x70, 0x91, 0x5f, 0x62, 0x17, 0x6e, 0x86, 0x9e, 0x30, 0xd8, 0x19, 0x50, 0x1b, 0x3a, 0x10, 0xfa,
0x21, 0x77, 0x87, 0x11, 0xc2, 0x25, 0x7e, 0x91, 0x2d, 0x8c, 0xbf, 0x45, 0x16, 0xac, 0xcb, 0x74, 0x0e, 0xb9, 0x3b, 0x8c, 0x10, 0x2e, 0xf1, 0x8b, 0x6c, 0x61, 0xfc, 0x2d, 0xb2, 0x60, 0x5d, 0xa6,
0x31, 0xf6, 0xb6, 0x15, 0xa1, 0x87, 0x81, 0x91, 0xc2, 0x4f, 0x2f, 0x5e, 0xc9, 0xa5, 0xde, 0x4b, 0x8b, 0xb1, 0xb7, 0xad, 0x08, 0x3d, 0x0c, 0x8c, 0x14, 0x7e, 0x7a, 0xf1, 0x4a, 0x2e, 0xf5, 0x5e,
0x7c, 0x90, 0x88, 0xb1, 0xe7, 0xf7, 0x12, 0xaf, 0xf2, 0x06, 0x9b, 0xdf, 0x44, 0x73, 0x2f, 0x65, 0xe2, 0x83, 0x44, 0x8c, 0x3d, 0xbf, 0x97, 0x78, 0x95, 0x37, 0xd8, 0xfc, 0x26, 0x9a, 0x7b, 0x29,
0x91, 0x28, 0x5b, 0x52, 0x5b, 0xd2, 0x4d, 0x8d, 0x91, 0x4e, 0x29, 0x0f, 0x71, 0xce, 0xa6, 0x37, 0x8b, 0x44, 0xd9, 0x92, 0xda, 0x92, 0x6e, 0x6a, 0x8c, 0x74, 0x4a, 0x79, 0x88, 0x73, 0x36, 0xb3,
0xd1, 0x10, 0x98, 0x62, 0x4b, 0x14, 0xa7, 0xd8, 0xbc, 0xae, 0xf2, 0x31, 0x85, 0xff, 0x8f, 0x62, 0x89, 0x86, 0xc0, 0x14, 0x5b, 0xa2, 0x38, 0xc5, 0xe6, 0x75, 0x95, 0x8f, 0x29, 0xfc, 0x7f, 0x14,
0xd0, 0x8e, 0x54, 0x58, 0x04, 0x1f, 0x26, 0x37, 0x77, 0x43, 0x8c, 0x84, 0x41, 0x92, 0x51, 0xa4, 0x83, 0x76, 0xa4, 0xc2, 0x22, 0xf8, 0x30, 0xb9, 0xb9, 0x1b, 0x62, 0x24, 0x0c, 0x92, 0x8c, 0x22,
0x3d, 0x42, 0x72, 0xf6, 0x91, 0x22, 0x50, 0x84, 0xff, 0x3f, 0x87, 0x8b, 0x5a, 0xdf, 0x41, 0x39, 0xed, 0x11, 0x92, 0xb3, 0x8f, 0x14, 0x81, 0x22, 0xfc, 0xff, 0x39, 0x5c, 0xd4, 0xfa, 0x36, 0xca,
0x9c, 0x70, 0x63, 0xdc, 0x43, 0x53, 0xd2, 0x32, 0x79, 0x9d, 0x28, 0xc9, 0xea, 0x3f, 0x25, 0xbe, 0xe1, 0x84, 0x1b, 0xe3, 0x1e, 0x9a, 0x92, 0x96, 0xc9, 0xeb, 0x44, 0x49, 0x56, 0xff, 0x29, 0xf1,
0x93, 0x52, 0x25, 0xbe, 0xb7, 0x19, 0x89, 0xc0, 0xa4, 0x78, 0x93, 0x3f, 0xc4, 0xae, 0x74, 0xf1, 0xed, 0x94, 0x2a, 0xf1, 0xbd, 0xcd, 0x48, 0x04, 0x26, 0xc5, 0x9b, 0xfc, 0x21, 0x76, 0xa5, 0x8b,
0x30, 0x42, 0x7d, 0xb4, 0xa7, 0x7c, 0xe9, 0x8e, 0x3a, 0xc1, 0xa1, 0xca, 0x52, 0x92, 0x58, 0xde, 0x87, 0x11, 0xea, 0xa3, 0x3d, 0xe5, 0x4b, 0x77, 0xd4, 0x09, 0x0e, 0x55, 0x96, 0x92, 0xc4, 0xf2,
0x45, 0x96, 0x50, 0x58, 0x62, 0x7a, 0x0a, 0x3f, 0x4a, 0x31, 0xd9, 0x51, 0x66, 0x9f, 0xda, 0xe1, 0x0e, 0xb2, 0x84, 0xc2, 0x12, 0xd3, 0x53, 0xf8, 0x51, 0x8a, 0xc9, 0x8e, 0x32, 0xfb, 0xd4, 0x0e,
0x96, 0x6d, 0xb0, 0xf0, 0x18, 0x69, 0xd9, 0x51, 0x5d, 0x0c, 0x7d, 0xe9, 0x8a, 0xd5, 0x63, 0x21, 0xb7, 0x6c, 0x83, 0x85, 0xc7, 0x48, 0xcb, 0x8e, 0xea, 0x62, 0xe8, 0x4b, 0x57, 0xac, 0x1e, 0x0b,
0x7d, 0xd1, 0xf3, 0x11, 0x56, 0x28, 0x28, 0xfb, 0xd8, 0xa7, 0x92, 0xcd, 0xde, 0xf7, 0x1a, 0x3f, 0xe9, 0x8b, 0x9e, 0x8f, 0xb0, 0x42, 0x41, 0xd9, 0xc7, 0x3e, 0x95, 0x6c, 0xf6, 0xbe, 0xd7, 0xf8,
0xc7, 0xaa, 0x1b, 0x2a, 0x72, 0xb1, 0x8d, 0xc1, 0x08, 0x1e, 0xa7, 0x63, 0x57, 0x18, 0xdc, 0x92, 0x34, 0xab, 0x6e, 0xa8, 0xc8, 0xc5, 0x36, 0x06, 0x23, 0x78, 0x9c, 0x8e, 0x5d, 0x61, 0x70, 0x4b,
0x03, 0x69, 0xe0, 0x89, 0x58, 0xbc, 0x87, 0x9d, 0xf6, 0x8e, 0x32, 0xdb, 0xc2, 0xb8, 0x47, 0x70, 0x0e, 0xa4, 0x81, 0x27, 0x62, 0xf1, 0x1e, 0x76, 0xda, 0x3b, 0xca, 0x6c, 0x0b, 0xe3, 0x1e, 0xc1,
0x9d, 0x72, 0x90, 0xf6, 0x82, 0x96, 0x52, 0x91, 0xb7, 0xb3, 0x0a, 0x1e, 0xe7, 0xec, 0x5c, 0xbb, 0x75, 0xca, 0x41, 0xda, 0x0b, 0x5a, 0x4a, 0x45, 0xde, 0xce, 0x2a, 0x78, 0x9c, 0xb3, 0xe9, 0x76,
0xdd, 0xc5, 0x8f, 0x0f, 0x51, 0x9b, 0xae, 0x70, 0x11, 0xfe, 0x32, 0xd5, 0x74, 0x19, 0xb3, 0x79, 0xbb, 0x8b, 0x1f, 0x1d, 0xa2, 0x36, 0x5d, 0xe1, 0x22, 0xfc, 0x79, 0xaa, 0xe9, 0x32, 0x66, 0xf3,
0x49, 0xdb, 0x0d, 0x92, 0x98, 0xfc, 0xb4, 0xa3, 0x02, 0x84, 0x33, 0xbc, 0xce, 0xce, 0xde, 0x0c, 0x92, 0xb6, 0x1b, 0x24, 0x31, 0xf9, 0x69, 0x47, 0x05, 0x08, 0x67, 0x78, 0x9d, 0x9d, 0xbd, 0x19,
0xa4, 0xd6, 0x43, 0xf4, 0xc0, 0xa1, 0x9a, 0xec, 0x04, 0x7b, 0x91, 0xea, 0xd3, 0x20, 0x85, 0x12, 0x48, 0xad, 0x87, 0xe8, 0x81, 0x43, 0x35, 0xd9, 0x09, 0xf6, 0x22, 0xd5, 0xa7, 0x41, 0x0a, 0x25,
0x51, 0x37, 0x64, 0x20, 0xf5, 0x91, 0xed, 0x46, 0x8c, 0x4d, 0x26, 0xc5, 0x59, 0xe1, 0x55, 0x36, 0xa2, 0x6e, 0xc8, 0x40, 0xea, 0x23, 0xdb, 0x8d, 0x18, 0x9b, 0x4c, 0x8a, 0xb3, 0xc2, 0xab, 0x6c,
0xd1, 0x45, 0x13, 0x8d, 0x60, 0xa2, 0xf9, 0xbc, 0xc3, 0xea, 0x89, 0x47, 0xb1, 0x9e, 0x79, 0x06, 0xa2, 0x8b, 0x26, 0x1a, 0xc1, 0x44, 0xf3, 0x79, 0x87, 0xd5, 0x13, 0x8f, 0x62, 0x3d, 0xf3, 0x0c,
0xc5, 0x73, 0xae, 0x29, 0x2b, 0x0f, 0x87, 0x9a, 0xe4, 0x66, 0xa4, 0xee, 0xca, 0xa0, 0x0f, 0x25, 0x8a, 0xe7, 0x5c, 0x53, 0x56, 0x1e, 0x0e, 0x35, 0xc9, 0xcd, 0x48, 0xdd, 0x95, 0x41, 0x1f, 0x4a,
0x12, 0xbc, 0x8f, 0xc2, 0xb7, 0x4a, 0x6a, 0x6c, 0x6a, 0xc3, 0x1f, 0x5a, 0x8d, 0x15, 0xab, 0x9f, 0x24, 0x78, 0x1f, 0x85, 0x6f, 0x95, 0xd4, 0xd8, 0xd4, 0x86, 0x3f, 0xb4, 0x1a, 0x2b, 0x56, 0x3f,
0x0e, 0xc4, 0x36, 0x41, 0x24, 0x4a, 0xa7, 0x10, 0x3d, 0x98, 0xa4, 0x10, 0xc5, 0x45, 0x44, 0xb4, 0x1d, 0x88, 0x6d, 0x82, 0x48, 0x94, 0x4e, 0x21, 0x7a, 0x30, 0x49, 0x21, 0x8a, 0x8b, 0x88, 0x68,
0xa9, 0xe6, 0xfb, 0xd9, 0xcc, 0xa9, 0x7d, 0x84, 0x9f, 0x65, 0x95, 0x44, 0x35, 0xb0, 0xfa, 0x9a, 0x53, 0xcd, 0xf7, 0xb2, 0xd9, 0x53, 0xfb, 0x08, 0x3f, 0xcb, 0x2a, 0x89, 0x6a, 0x60, 0xf5, 0x35,
0x0c, 0x44, 0x34, 0x8a, 0x3b, 0x15, 0x78, 0x14, 0xbd, 0x0d, 0x5f, 0x09, 0x93, 0x00, 0xd8, 0x7c, 0x19, 0x88, 0x68, 0x14, 0x77, 0x2a, 0xf0, 0x28, 0x7a, 0x1b, 0xbe, 0x12, 0x26, 0x01, 0xb0, 0xf9,
0xa5, 0x6e, 0x17, 0x02, 0x7b, 0xf1, 0x1c, 0xab, 0xde, 0x0c, 0x3c, 0x3c, 0x94, 0x01, 0x7a, 0x70, 0xe9, 0x69, 0xbb, 0x10, 0xd8, 0x8b, 0xd3, 0xac, 0x7a, 0x33, 0xf0, 0xf0, 0x50, 0x06, 0xe8, 0xc1,
0xc6, 0x76, 0x90, 0xb8, 0xf6, 0xf2, 0x52, 0xa6, 0x70, 0x4f, 0x93, 0x31, 0x05, 0x0c, 0xa9, 0x0d, 0x19, 0xdb, 0x41, 0xe2, 0xda, 0xcb, 0x4b, 0x99, 0xc2, 0x3d, 0x43, 0xc6, 0x14, 0x30, 0xa4, 0x36,
0xdc, 0x10, 0xba, 0x00, 0x1d, 0x52, 0x16, 0xb4, 0xed, 0xba, 0xd9, 0x2b, 0x5e, 0xef, 0xdb, 0x2c, 0x70, 0x43, 0xe8, 0x02, 0x74, 0x48, 0x59, 0xd0, 0xb6, 0xeb, 0x66, 0xaf, 0x78, 0xbd, 0x6f, 0xb3,
0x38, 0x52, 0x77, 0x73, 0x4c, 0xc3, 0x11, 0x69, 0xda, 0x44, 0xb3, 0x3f, 0xd2, 0x06, 0x07, 0x2d, 0xe0, 0x48, 0xdd, 0xcd, 0x31, 0x0d, 0x47, 0xa4, 0x69, 0x13, 0xcd, 0xfe, 0x48, 0x1b, 0x1c, 0xb4,
0x15, 0x1c, 0xca, 0xbe, 0x06, 0x49, 0x9a, 0xb6, 0x94, 0xf0, 0x0a, 0xd7, 0x6f, 0x53, 0x1e, 0x76, 0x54, 0x70, 0x28, 0xfb, 0x1a, 0x24, 0x69, 0xda, 0x52, 0xc2, 0x2b, 0x5c, 0xbf, 0x4d, 0x79, 0xd8,
0xd1, 0x47, 0xa1, 0x8b, 0x52, 0xef, 0xd8, 0x1e, 0x6a, 0x4d, 0x5d, 0xf5, 0xa5, 0xd0, 0xe0, 0x93, 0x45, 0x1f, 0x85, 0x2e, 0x4a, 0xbd, 0x63, 0x7b, 0xa8, 0x35, 0x75, 0xd5, 0x97, 0x42, 0x83, 0x4f,
0x2b, 0x64, 0x65, 0x7c, 0x1c, 0xd0, 0xfb, 0xae, 0xfa, 0x06, 0xa3, 0xf8, 0x1c, 0xf0, 0x79, 0x36, 0xae, 0x90, 0x95, 0xf1, 0x71, 0x40, 0xef, 0xbb, 0xea, 0x1b, 0x8c, 0xe2, 0x73, 0x40, 0x56, 0xd8,
0x13, 0xf3, 0xef, 0x89, 0xc8, 0x48, 0x2b, 0xe4, 0x25, 0xc7, 0x66, 0x52, 0xa4, 0xc2, 0x1c, 0x7b, 0x73, 0x41, 0x88, 0xe2, 0xf3, 0x6c, 0x36, 0x16, 0xb2, 0x27, 0x22, 0x23, 0x2d, 0xf8, 0x92, 0x63,
0x99, 0x46, 0x56, 0xfd, 0x86, 0xd0, 0x39, 0xf4, 0x33, 0x87, 0x2f, 0xb0, 0xd9, 0xd4, 0xb5, 0x1c, 0xd3, 0x2b, 0x52, 0x61, 0x8e, 0xbd, 0x4c, 0x73, 0xac, 0x7e, 0x43, 0xe8, 0x1c, 0xfa, 0xa9, 0xc3,
0xff, 0xb9, 0xc3, 0xe7, 0xd8, 0x34, 0xb9, 0x96, 0x61, 0x1a, 0x7e, 0x61, 0x41, 0x72, 0xa2, 0x00, 0x17, 0xd8, 0xb9, 0xd4, 0xdf, 0x1c, 0xff, 0x99, 0xc3, 0xe7, 0xd8, 0x0c, 0xf9, 0x9b, 0x61, 0x1a,
0xfe, 0xd2, 0x4a, 0x48, 0xbc, 0x28, 0xe0, 0xbf, 0xb2, 0xca, 0x48, 0x42, 0x92, 0x44, 0x1a, 0x5e, 0x7e, 0x6e, 0x41, 0xf2, 0xac, 0x00, 0xfe, 0xc2, 0x4a, 0x48, 0x5c, 0x2b, 0xe0, 0xbf, 0xb4, 0xca,
0x75, 0xc8, 0xd2, 0x54, 0x59, 0x02, 0xc3, 0x6b, 0x96, 0x91, 0xa4, 0x66, 0x8c, 0xaf, 0x5b, 0xc6, 0x48, 0x42, 0x92, 0x59, 0x1a, 0x5e, 0x71, 0xc8, 0xd2, 0x54, 0x59, 0x02, 0xc3, 0xab, 0x96, 0x91,
0x44, 0x66, 0x86, 0xbe, 0x61, 0xd1, 0x1b, 0x22, 0xf0, 0xd4, 0xe1, 0x61, 0x86, 0xbe, 0xe9, 0xf0, 0xa4, 0x66, 0x8c, 0xaf, 0x59, 0xc6, 0x44, 0x66, 0x86, 0xbe, 0x6e, 0xd1, 0x1b, 0x22, 0xf0, 0xd4,
0x06, 0x9b, 0xa3, 0xeb, 0x6b, 0xc2, 0x17, 0x81, 0x9b, 0xf3, 0xbf, 0xe5, 0xf0, 0xf3, 0x0c, 0x4e, 0xe1, 0x61, 0x86, 0xbe, 0xe1, 0xf0, 0x06, 0x9b, 0xa3, 0xeb, 0x6b, 0xc2, 0x17, 0x81, 0x9b, 0xf3,
0xa9, 0xd3, 0xf0, 0x5c, 0x89, 0x43, 0x1a, 0x5f, 0x5b, 0x47, 0xf0, 0xe5, 0x92, 0x8d, 0x55, 0xc2, 0xbf, 0xe9, 0xf0, 0xf3, 0x0c, 0x4e, 0xa9, 0xd3, 0xf0, 0x5c, 0x89, 0x43, 0x1a, 0x74, 0x5b, 0x5c,
0x18, 0x63, 0x5f, 0x29, 0xf1, 0xe9, 0x38, 0xe8, 0xf1, 0xf9, 0xc5, 0x12, 0xaf, 0xb1, 0xc9, 0x4e, 0xf0, 0xa5, 0x92, 0x8d, 0x55, 0xc2, 0x18, 0x63, 0x5f, 0x2e, 0xf1, 0x99, 0xf8, 0x25, 0xe2, 0xf3,
0xa0, 0x31, 0x32, 0xf0, 0x59, 0xca, 0xef, 0xc9, 0xb8, 0x1f, 0xc3, 0xe7, 0xa8, 0xa2, 0x26, 0x6c, 0x8b, 0x25, 0x5e, 0x63, 0x93, 0x9d, 0x40, 0x63, 0x64, 0xe0, 0x33, 0x94, 0xf4, 0x93, 0x71, 0x93,
0x7e, 0xc3, 0xe7, 0x69, 0xd6, 0xf3, 0x2e, 0x6a, 0x0c, 0xbc, 0x42, 0xed, 0x68, 0xf8, 0x82, 0xbd, 0x86, 0xcf, 0x52, 0x99, 0x4d, 0xd8, 0xa4, 0x87, 0xcf, 0xd1, 0x02, 0xc0, 0xbb, 0xa8, 0x31, 0xf0,
0x11, 0x0f, 0x53, 0xf8, 0x5b, 0xd9, 0x86, 0xa6, 0x38, 0x59, 0xff, 0x5e, 0x26, 0x13, 0x36, 0xd1, 0x0a, 0x05, 0xa5, 0xe1, 0xf3, 0xf6, 0x46, 0x3c, 0x61, 0xe1, 0xaf, 0x65, 0x1b, 0x9a, 0xe2, 0xb8,
0xe4, 0x95, 0x0d, 0xff, 0x28, 0xf3, 0x8b, 0xec, 0x7c, 0x8a, 0xd9, 0x39, 0x97, 0xd5, 0xf4, 0x3f, 0xfd, 0x5b, 0x99, 0x4c, 0xd8, 0x44, 0x93, 0x97, 0x3b, 0xfc, 0xbd, 0xcc, 0x2f, 0xb2, 0xf3, 0x29,
0xcb, 0xfc, 0x32, 0xbb, 0x40, 0x4d, 0x3f, 0xcb, 0x1b, 0xba, 0x24, 0xb5, 0x91, 0xae, 0x86, 0x57, 0x66, 0x87, 0x5f, 0x56, 0xe8, 0xff, 0x28, 0xf3, 0xcb, 0xec, 0x02, 0x4d, 0x82, 0x2c, 0x0f, 0xe8,
0xca, 0xfc, 0x12, 0x5b, 0xd8, 0x44, 0x93, 0xbd, 0x47, 0x81, 0xf8, 0xaf, 0x32, 0x3f, 0xc7, 0xce, 0x92, 0xd4, 0x46, 0xba, 0x1a, 0xfe, 0x59, 0xe6, 0x97, 0xd8, 0xc2, 0x26, 0x9a, 0xec, 0x3d, 0x0a,
0x52, 0xd5, 0x4b, 0x3c, 0x46, 0x78, 0xb5, 0x4c, 0x8f, 0x9a, 0x1e, 0x13, 0x73, 0x5e, 0x2b, 0x53, 0xc4, 0x7f, 0x95, 0xf9, 0x34, 0x3b, 0x4b, 0xad, 0x40, 0xe2, 0x31, 0xc2, 0x2b, 0x65, 0x7a, 0xd4,
0xa8, 0x3f, 0x44, 0x3d, 0xaa, 0x3d, 0x68, 0x1d, 0x89, 0x20, 0x40, 0x5f, 0xc3, 0xeb, 0x65, 0x0a, 0xf4, 0x98, 0x98, 0xf3, 0x6a, 0x99, 0x42, 0xfd, 0x01, 0x6a, 0x5c, 0xed, 0x41, 0xeb, 0x48, 0x04,
0x68, 0x17, 0x07, 0xea, 0x18, 0x0b, 0xf0, 0x1b, 0xd6, 0x69, 0xcb, 0xfc, 0xc1, 0x21, 0x46, 0xa3, 0x01, 0xfa, 0x1a, 0x5e, 0x2b, 0x53, 0x40, 0xbb, 0x38, 0x50, 0xc7, 0x58, 0x80, 0x5f, 0xb7, 0x4e,
0x8c, 0xf0, 0x66, 0x99, 0x9e, 0x26, 0xe6, 0x1f, 0xa7, 0xbc, 0x55, 0xe6, 0x57, 0x58, 0x23, 0x6e, 0x5b, 0xe6, 0xf7, 0x0f, 0x31, 0x1a, 0x65, 0x84, 0x37, 0xca, 0xf4, 0x34, 0x31, 0xff, 0x38, 0xe5,
0x16, 0xe9, 0xc3, 0x10, 0xb1, 0x8f, 0xd4, 0xac, 0xe1, 0xb9, 0x4a, 0x26, 0xb1, 0x8d, 0xbe, 0x11, 0xcd, 0x32, 0xbf, 0xc2, 0x1a, 0x71, 0x07, 0x49, 0x1f, 0x86, 0x88, 0x7d, 0xa4, 0x0e, 0x0e, 0xcf,
0xd9, 0xbd, 0x4f, 0x54, 0xc8, 0x2e, 0x2a, 0xae, 0xbc, 0x47, 0x6b, 0x78, 0xbe, 0x42, 0x2f, 0xba, 0x55, 0x32, 0x89, 0x6d, 0xf4, 0x8d, 0xc8, 0xee, 0x7d, 0xac, 0x42, 0x76, 0x51, 0xc5, 0xe5, 0x8d,
0x89, 0x26, 0x69, 0xd3, 0x1a, 0x3e, 0x49, 0xab, 0xd5, 0xf4, 0xcd, 0x40, 0x0f, 0x7b, 0x99, 0xa1, 0x5b, 0xc3, 0xf3, 0x15, 0x7a, 0xd1, 0x4d, 0x34, 0x49, 0xef, 0xd6, 0xf0, 0x71, 0xda, 0xb7, 0x66,
0xf0, 0xa9, 0xf4, 0x72, 0x5b, 0x6a, 0x13, 0xc9, 0xde, 0xd0, 0x66, 0xfa, 0xa7, 0x2b, 0xe4, 0xd4, 0x6e, 0x06, 0x7a, 0xd8, 0xcb, 0x0c, 0x85, 0x4f, 0xa4, 0x97, 0xdb, 0x52, 0x9b, 0x48, 0xf6, 0x86,
0xfe, 0x28, 0x70, 0xc7, 0xe0, 0x17, 0xac, 0xcc, 0xc4, 0x36, 0x6b, 0xd4, 0xaf, 0x2b, 0x7c, 0x86, 0x36, 0xd3, 0x3f, 0x59, 0x21, 0xa7, 0xf6, 0x47, 0x81, 0x3b, 0x06, 0xbf, 0x60, 0x65, 0x26, 0xb6,
0xb1, 0xb8, 0xaa, 0x2d, 0xf0, 0x9b, 0x54, 0x1e, 0xed, 0x52, 0xc7, 0x18, 0xd9, 0x41, 0x03, 0xbf, 0x59, 0xa3, 0x7e, 0x55, 0xe1, 0xb3, 0x8c, 0xc5, 0xa5, 0x6e, 0x81, 0x5f, 0xa7, 0xf2, 0x68, 0xc1,
0xcd, 0x4c, 0x2c, 0xf4, 0x4e, 0xf8, 0x5d, 0x85, 0x82, 0x7e, 0x20, 0x07, 0x78, 0x20, 0xdd, 0x3b, 0x3a, 0xc6, 0xc8, 0x4e, 0x1f, 0xf8, 0x4d, 0x66, 0x62, 0xa1, 0xa1, 0xc2, 0x6f, 0x2b, 0x14, 0xf4,
0xf0, 0xd5, 0x2a, 0xd9, 0x67, 0x63, 0x42, 0x13, 0x21, 0xce, 0x91, 0xaf, 0x55, 0x29, 0xe5, 0x28, 0x03, 0x39, 0xc0, 0x03, 0xe9, 0xde, 0x81, 0xaf, 0x54, 0xc9, 0x3e, 0x1b, 0x13, 0x1a, 0x13, 0x71,
0x93, 0xe3, 0x94, 0xfb, 0xba, 0x3d, 0x27, 0xa3, 0xa0, 0xd3, 0x86, 0x6f, 0xd0, 0x4e, 0xc7, 0x92, 0x8e, 0x7c, 0xb5, 0x4a, 0x29, 0x47, 0x99, 0x1c, 0xa7, 0xdc, 0xd7, 0xec, 0x39, 0x99, 0x0f, 0x9d,
0xf3, 0xc1, 0xfe, 0x2e, 0x7c, 0xb3, 0x4a, 0xaa, 0x56, 0x7d, 0x5f, 0xb9, 0xc2, 0x64, 0xf5, 0xf4, 0x36, 0x7c, 0x9d, 0x16, 0x3d, 0x96, 0x9c, 0x0f, 0xf6, 0x77, 0xe1, 0x1b, 0x55, 0x52, 0xb5, 0xea,
0xad, 0x2a, 0x15, 0x64, 0x41, 0x7b, 0xf2, 0xee, 0xdf, 0xae, 0x5a, 0x47, 0x63, 0xdc, 0xa6, 0x6b, 0xfb, 0xca, 0x15, 0x26, 0xab, 0xa7, 0x6f, 0x56, 0xa9, 0x20, 0x0b, 0xda, 0x93, 0x77, 0xff, 0x56,
0x9b, 0xda, 0xea, 0x77, 0xac, 0x54, 0x9a, 0x41, 0x64, 0xc9, 0x81, 0x81, 0xef, 0x5a, 0xbe, 0xd3, 0xd5, 0x3a, 0x1a, 0xe3, 0x36, 0x5d, 0xdb, 0xd4, 0x6b, 0xbf, 0x6d, 0xa5, 0xd2, 0x60, 0x22, 0x4b,
0x6b, 0x0a, 0xfc, 0xbe, 0x96, 0x64, 0x68, 0x01, 0xfb, 0x43, 0x2d, 0xae, 0xb0, 0xf1, 0xbd, 0x04, 0x0e, 0x0c, 0x7c, 0xc7, 0xf2, 0x9d, 0xde, 0x5d, 0xe0, 0x77, 0xb5, 0x24, 0x43, 0x0b, 0xd8, 0xef,
0xfe, 0x68, 0xe1, 0xd3, 0xbb, 0x0c, 0xfc, 0xa9, 0x46, 0x86, 0x15, 0xd7, 0x11, 0x5a, 0xca, 0x35, 0x6b, 0x71, 0x85, 0x8d, 0x2f, 0x2b, 0xf0, 0x07, 0x0b, 0x9f, 0x5e, 0x70, 0xe0, 0x8f, 0x35, 0x32,
0xfc, 0xb9, 0x46, 0x16, 0xe4, 0x8b, 0x07, 0x7c, 0xaf, 0x4e, 0xc1, 0x4a, 0x57, 0x0e, 0xf8, 0x7e, 0xac, 0xb8, 0xa3, 0xd0, 0xa6, 0xae, 0xe1, 0x4f, 0x35, 0xb2, 0x20, 0xdf, 0x46, 0xe0, 0xbb, 0x75,
0x9d, 0xdc, 0x3c, 0xb5, 0x6c, 0xc0, 0x0f, 0xea, 0xf6, 0x39, 0xb2, 0x35, 0x03, 0x7e, 0x58, 0x00, 0x0a, 0x56, 0xba, 0x87, 0xc0, 0xf7, 0xea, 0xe4, 0xe6, 0xa9, 0x0d, 0x04, 0xbe, 0x5f, 0xb7, 0xcf,
0x88, 0x0b, 0x7e, 0x54, 0xb7, 0x3d, 0x6c, 0x6c, 0xb5, 0x80, 0x1f, 0xd7, 0xc9, 0xb6, 0xd3, 0x4b, 0x91, 0xed, 0x1e, 0xf0, 0x83, 0x02, 0x40, 0x5c, 0xf0, 0xc3, 0xba, 0xed, 0x61, 0x63, 0xfb, 0x06,
0x05, 0xfc, 0xa4, 0x1e, 0x3f, 0x77, 0xb6, 0x4e, 0xc0, 0x4f, 0xeb, 0x54, 0x43, 0xf7, 0x5f, 0x24, 0xfc, 0xa8, 0x4e, 0xb6, 0x9d, 0xde, 0x34, 0xe0, 0xc7, 0xf5, 0xf8, 0xb9, 0xb3, 0x1d, 0x03, 0x7e,
0xe0, 0x25, 0xab, 0x2b, 0x5f, 0x21, 0xe0, 0xe5, 0x7a, 0x73, 0x89, 0x4d, 0xb5, 0xb5, 0x6f, 0x27, 0x52, 0xa7, 0x1a, 0xba, 0xff, 0x76, 0x01, 0x2f, 0x59, 0x5d, 0xf9, 0x5e, 0x01, 0x2f, 0xd7, 0x9b,
0xcf, 0x14, 0x2b, 0xb7, 0xb5, 0x0f, 0x67, 0xa8, 0x51, 0xaf, 0x29, 0xe5, 0xaf, 0x9f, 0x84, 0xd1, 0x4b, 0x6c, 0xaa, 0xad, 0x7d, 0x3b, 0x8e, 0xa6, 0x58, 0xb9, 0xad, 0x7d, 0x38, 0x43, 0xdd, 0x7b,
0x33, 0x4f, 0x80, 0xd3, 0x5c, 0x63, 0x33, 0x2d, 0x35, 0x08, 0x45, 0x56, 0xb0, 0x76, 0xd8, 0xc4, 0x4d, 0x29, 0x7f, 0xfd, 0x24, 0x8c, 0x9e, 0x79, 0x02, 0x9c, 0xe6, 0x1a, 0x9b, 0x6d, 0xa9, 0x41,
0x53, 0x0a, 0xbd, 0x38, 0x55, 0xce, 0x50, 0xb7, 0x5f, 0x3f, 0x41, 0x77, 0x68, 0x67, 0xa2, 0x43, 0x28, 0xb2, 0x82, 0xb5, 0x13, 0x28, 0x1e, 0x5d, 0xe8, 0xc5, 0xa9, 0x72, 0x86, 0x46, 0xc0, 0xfa,
0x47, 0xba, 0x44, 0x41, 0xf6, 0xa0, 0xd4, 0xfc, 0x30, 0x83, 0x96, 0x0a, 0xb4, 0xd4, 0x06, 0x03, 0x09, 0xba, 0x43, 0x3b, 0x28, 0x1d, 0x3a, 0xd2, 0x25, 0x0a, 0xb2, 0x07, 0xa5, 0xe6, 0x07, 0x19,
0x77, 0xb4, 0x85, 0xc7, 0xe8, 0xdb, 0xc9, 0x6b, 0x22, 0x15, 0xf4, 0xe1, 0x8c, 0xfd, 0x6e, 0x41, 0xb4, 0x54, 0xa0, 0xa5, 0x36, 0x18, 0xb8, 0xa3, 0x2d, 0x3c, 0x46, 0xdf, 0x8e, 0x63, 0x13, 0xa9,
0xfb, 0xfd, 0x11, 0xcf, 0xe7, 0x35, 0xda, 0x4d, 0xec, 0xc7, 0xc9, 0x34, 0x63, 0xeb, 0xc7, 0x18, 0xa0, 0x0f, 0x67, 0xec, 0xc7, 0x0c, 0xda, 0x8f, 0x92, 0x78, 0x68, 0xaf, 0xd1, 0xc2, 0x62, 0xbf,
0x98, 0xa1, 0xf0, 0xfd, 0x11, 0x94, 0xe9, 0xdc, 0x1a, 0x6a, 0xa3, 0x06, 0xf2, 0x59, 0x1a, 0xd3, 0x58, 0x66, 0x18, 0x5b, 0x3f, 0xc6, 0xc0, 0x0c, 0x85, 0xef, 0x8f, 0xa0, 0x4c, 0xe7, 0xd6, 0x50,
0xcd, 0xcf, 0x38, 0xac, 0x16, 0x0f, 0xe3, 0xcc, 0xb4, 0xf8, 0xb8, 0x87, 0x81, 0x27, 0xad, 0x70, 0x1b, 0x35, 0x90, 0xcf, 0xd2, 0xec, 0x6e, 0x7e, 0xca, 0x61, 0xb5, 0x78, 0x42, 0x67, 0xa6, 0xc5,
0xda, 0xad, 0x2d, 0x94, 0x6c, 0x10, 0x4e, 0xce, 0xb4, 0x6f, 0x44, 0x64, 0x2d, 0xb4, 0x9f, 0x14, 0xc7, 0x3d, 0x0c, 0x3c, 0x69, 0x85, 0xd3, 0xc2, 0x6d, 0xa1, 0x64, 0xad, 0x70, 0x72, 0xa6, 0x7d,
0xc9, 0xbd, 0xc8, 0xda, 0xe9, 0xc1, 0x44, 0x0e, 0xe6, 0xbe, 0x4c, 0xd2, 0x12, 0x59, 0x14, 0xb7, 0x23, 0x22, 0x6b, 0xa1, 0xfd, 0xce, 0x48, 0xee, 0x45, 0xd6, 0x4e, 0x0f, 0x26, 0x72, 0x30, 0xf7,
0x1a, 0x78, 0x2d, 0x1f, 0x05, 0xcd, 0xeb, 0xa9, 0xe6, 0x75, 0xc6, 0xf2, 0x4f, 0x4a, 0x6b, 0x6b, 0x65, 0x92, 0x36, 0xcb, 0xa2, 0xb8, 0xd5, 0xc0, 0x6b, 0xf9, 0x28, 0x68, 0x88, 0x4f, 0x35, 0xaf,
0x3e, 0x22, 0xcf, 0x90, 0xc7, 0x9b, 0xbe, 0xea, 0x09, 0x1f, 0x1c, 0xda, 0x10, 0xec, 0x83, 0x97, 0x33, 0x96, 0x7f, 0x67, 0x5a, 0x5b, 0xf3, 0x91, 0x77, 0x86, 0x3c, 0xde, 0xf4, 0x55, 0x4f, 0xf8,
0x9a, 0x2f, 0x4c, 0xb0, 0x99, 0x53, 0x1f, 0x90, 0x64, 0x72, 0x76, 0x58, 0xf5, 0xe9, 0x55, 0xae, 0xe0, 0xd0, 0xda, 0x60, 0x1f, 0xbc, 0xd4, 0x7c, 0x61, 0x82, 0xcd, 0x9e, 0xfa, 0xaa, 0x24, 0x93,
0xb0, 0x07, 0x32, 0xe4, 0x9e, 0x95, 0xc0, 0xa1, 0xa5, 0x33, 0x23, 0x9f, 0xda, 0x0d, 0x4a, 0xfc, 0xb3, 0xc3, 0xaa, 0x4f, 0xaf, 0x72, 0x85, 0x3d, 0x90, 0x21, 0xf7, 0xec, 0x09, 0x0e, 0x6d, 0xa2,
0x2a, 0xbb, 0x94, 0x13, 0xef, 0xdd, 0x08, 0xa8, 0x2d, 0x37, 0x32, 0x86, 0xd3, 0xab, 0x41, 0x85, 0x19, 0xf9, 0xd4, 0xc2, 0x50, 0xe2, 0x57, 0xd9, 0xa5, 0x9c, 0x78, 0xef, 0x9a, 0x40, 0x6d, 0xb9,
0xa2, 0x95, 0x51, 0xa9, 0xd2, 0xe3, 0xcf, 0xbd, 0xfc, 0x6b, 0x37, 0x1e, 0x79, 0x30, 0x49, 0x5f, 0x91, 0x31, 0x9c, 0xde, 0x17, 0x2a, 0x14, 0xad, 0x8c, 0x4a, 0x95, 0x1e, 0x7f, 0x03, 0xe6, 0x9f,
0x60, 0xb9, 0x8d, 0x59, 0xca, 0xc0, 0x14, 0xc5, 0x31, 0x23, 0x24, 0xe3, 0xe8, 0xec, 0x18, 0x98, 0xc0, 0xf1, 0xc8, 0x83, 0x49, 0xfa, 0x2c, 0xcb, 0x6d, 0xcc, 0x52, 0x06, 0xa6, 0x28, 0x8e, 0x19,
0x8c, 0xa5, 0x2a, 0x05, 0x37, 0x03, 0xa9, 0x1f, 0xe5, 0xad, 0x80, 0xd1, 0x77, 0xc1, 0xa9, 0x10, 0x21, 0x19, 0x47, 0x67, 0xc7, 0xc0, 0x64, 0x2c, 0x55, 0x29, 0xb8, 0x19, 0x48, 0xfd, 0x28, 0x6f,
0xc4, 0x3d, 0xa7, 0x36, 0x46, 0xb1, 0x58, 0x1b, 0x8d, 0x90, 0x3e, 0xd4, 0x69, 0x07, 0x1a, 0x8b, 0x05, 0x8c, 0x3e, 0x16, 0x4e, 0x85, 0x20, 0xee, 0x39, 0xb5, 0x31, 0x8a, 0xc5, 0xda, 0x68, 0x84,
0x4b, 0x7c, 0xe3, 0xdc, 0x98, 0xf2, 0x64, 0xc2, 0x4d, 0xd3, 0xb6, 0x93, 0xaf, 0xec, 0x76, 0x36, 0xf4, 0xa1, 0x4e, 0x8b, 0xd1, 0x58, 0x5c, 0xe2, 0x1b, 0xd3, 0x63, 0xca, 0x93, 0x09, 0x37, 0x43,
0xce, 0x8c, 0x61, 0xb6, 0xf7, 0x01, 0x8c, 0xa9, 0x2b, 0x0c, 0x71, 0x98, 0x1d, 0x77, 0xd4, 0x26, 0x2b, 0x50, 0xbe, 0xc7, 0xdb, 0xd9, 0x38, 0x3b, 0x86, 0xd9, 0xde, 0x07, 0x30, 0xa6, 0xae, 0x30,
0x09, 0xf0, 0xb1, 0xe8, 0xc6, 0x76, 0xef, 0xde, 0x0d, 0x30, 0xd2, 0x47, 0x32, 0x84, 0xb9, 0xb1, 0xc4, 0xe1, 0xdc, 0xb8, 0xa3, 0x36, 0x49, 0x80, 0x8f, 0x45, 0x37, 0xb6, 0x7b, 0xf7, 0x6e, 0x80,
0xa0, 0xc5, 0xed, 0xc7, 0xe6, 0xc5, 0xfc, 0x58, 0x28, 0xc8, 0xf4, 0xfc, 0xd2, 0xf9, 0xf1, 0x07, 0x91, 0x3e, 0x92, 0x21, 0xcc, 0x8d, 0x05, 0x2d, 0x6e, 0x3f, 0x36, 0x2f, 0xe6, 0xc7, 0x42, 0x41,
0xb3, 0x0d, 0x20, 0xa7, 0x2e, 0x8c, 0x51, 0xb7, 0x45, 0x20, 0xfa, 0x05, 0x85, 0x17, 0xc6, 0x14, 0xa6, 0xe7, 0x97, 0xce, 0x8f, 0x3f, 0x98, 0x6d, 0x00, 0x39, 0x75, 0x61, 0x8c, 0xba, 0x2d, 0x02,
0x16, 0x3a, 0x4f, 0xa3, 0xb9, 0xce, 0xaa, 0xb6, 0x86, 0xec, 0x3f, 0x09, 0x54, 0x34, 0x81, 0xa4, 0xd1, 0x2f, 0x28, 0xbc, 0x30, 0xa6, 0xb0, 0xd0, 0x79, 0x1a, 0xcd, 0x75, 0x56, 0xb5, 0x35, 0x64,
0x1e, 0x28, 0x9f, 0x95, 0x69, 0x8d, 0xde, 0x40, 0xe1, 0x9b, 0xa3, 0x11, 0x38, 0xb4, 0x1d, 0xaf, 0xff, 0x5e, 0xa0, 0xa2, 0x09, 0x24, 0xf5, 0x40, 0xf9, 0xac, 0x4c, 0x6b, 0xf4, 0x06, 0x0a, 0xdf,
0xf6, 0x02, 0x15, 0x0d, 0x84, 0x0f, 0x25, 0x5b, 0xbe, 0x46, 0x04, 0xde, 0xda, 0x08, 0xca, 0xef, 0x1c, 0x8d, 0xc0, 0xa1, 0x95, 0x79, 0xb5, 0x17, 0xa8, 0x68, 0x20, 0x7c, 0x28, 0xd9, 0xf2, 0x35,
0x53, 0x6c, 0x36, 0xfb, 0xd7, 0xe4, 0x16, 0x9e, 0x98, 0x5b, 0xaa, 0x77, 0x9b, 0x5f, 0x5d, 0x89, 0x22, 0xf0, 0xd6, 0x46, 0x50, 0x7e, 0x8f, 0x62, 0xe7, 0xb2, 0xbf, 0x52, 0x6e, 0xe1, 0x89, 0xb9,
0xff, 0x09, 0x5d, 0x49, 0xff, 0x09, 0x5d, 0xd9, 0x46, 0xad, 0xc9, 0xb2, 0xd0, 0xa6, 0x59, 0xe3, 0xa5, 0x7a, 0xb7, 0xf9, 0xd5, 0x95, 0xf8, 0xef, 0xd1, 0x95, 0xf4, 0xef, 0xd1, 0x95, 0x6d, 0xd4,
0xaf, 0x53, 0xf6, 0xef, 0xa0, 0x87, 0xee, 0xff, 0x07, 0x5c, 0xe1, 0xef, 0x9d, 0xee, 0x4c, 0x58, 0x9a, 0x2c, 0x0b, 0x6d, 0x9a, 0x35, 0xfe, 0x32, 0x65, 0xff, 0x23, 0x7a, 0xe8, 0xfe, 0xff, 0xca,
0x38, 0xed, 0xf6, 0x6e, 0xaf, 0x6d, 0xb1, 0x69, 0xa9, 0xd2, 0x7b, 0xfd, 0x28, 0x74, 0xd7, 0x6a, 0x15, 0xfe, 0xf3, 0xe9, 0xce, 0x86, 0x85, 0xd3, 0x6e, 0xef, 0xf6, 0xda, 0x16, 0x9b, 0x91, 0x2a,
0x2d, 0x7b, 0x6f, 0x8f, 0x64, 0xec, 0x39, 0x1f, 0x59, 0xee, 0x4b, 0x73, 0x34, 0xec, 0x91, 0xb4, 0xbd, 0xd7, 0x8f, 0x42, 0x77, 0xad, 0xd6, 0xb2, 0xf7, 0xf6, 0x48, 0xc6, 0x9e, 0xf3, 0xa1, 0xe5,
0x6b, 0x31, 0xdb, 0x63, 0x52, 0x25, 0xbf, 0xae, 0x89, 0x50, 0x5e, 0x8b, 0xd5, 0x84, 0xbd, 0x2f, 0xbe, 0x34, 0x47, 0xc3, 0x1e, 0x49, 0xbb, 0x16, 0xb3, 0x3d, 0x26, 0x55, 0xf2, 0xeb, 0x9a, 0x08,
0x39, 0x4e, 0x6f, 0xd2, 0x6a, 0x7e, 0xf2, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x54, 0x8b, 0x8e, 0xe5, 0xb5, 0x58, 0x4d, 0xd8, 0xfb, 0xa2, 0xe3, 0xf4, 0x26, 0xad, 0xe6, 0x27, 0xff, 0x1d, 0x00,
0x9b, 0xde, 0x15, 0x00, 0x00, 0x00, 0xff, 0xff, 0x32, 0xc4, 0xc7, 0x30, 0xf3, 0x15, 0x00, 0x00,
} }

File diff suppressed because it is too large Load Diff

View File

@ -81,3 +81,9 @@ const (
MetricTypeKey = "metric_type" MetricTypeKey = "metric_type"
DimKey = "dim" DimKey = "dim"
) )
// Collection properties key
const (
CollectionTTLConfigKey = "collection.ttl.seconds"
)

View File

@ -400,80 +400,81 @@ const char descriptor_table_protodef_common_2eproto[] PROTOBUF_SECTION_VARIABLE(
"aled\020\003\022\013\n\007Flushed\020\004\022\014\n\010Flushing\020\005\022\013\n\007Dro" "aled\020\003\022\013\n\007Flushed\020\004\022\014\n\010Flushing\020\005\022\013\n\007Dro"
"pped\020\006\022\r\n\tImporting\020\007*>\n\017PlaceholderType" "pped\020\006\022\r\n\tImporting\020\007*>\n\017PlaceholderType"
"\022\010\n\004None\020\000\022\020\n\014BinaryVector\020d\022\017\n\013FloatVec" "\022\010\n\004None\020\000\022\020\n\014BinaryVector\020d\022\017\n\013FloatVec"
"tor\020e*\370\014\n\007MsgType\022\r\n\tUndefined\020\000\022\024\n\020Crea" "tor\020e*\215\r\n\007MsgType\022\r\n\tUndefined\020\000\022\024\n\020Crea"
"teCollection\020d\022\022\n\016DropCollection\020e\022\021\n\rHa" "teCollection\020d\022\022\n\016DropCollection\020e\022\021\n\rHa"
"sCollection\020f\022\026\n\022DescribeCollection\020g\022\023\n" "sCollection\020f\022\026\n\022DescribeCollection\020g\022\023\n"
"\017ShowCollections\020h\022\024\n\020GetSystemConfigs\020i" "\017ShowCollections\020h\022\024\n\020GetSystemConfigs\020i"
"\022\022\n\016LoadCollection\020j\022\025\n\021ReleaseCollectio" "\022\022\n\016LoadCollection\020j\022\025\n\021ReleaseCollectio"
"n\020k\022\017\n\013CreateAlias\020l\022\r\n\tDropAlias\020m\022\016\n\nA" "n\020k\022\017\n\013CreateAlias\020l\022\r\n\tDropAlias\020m\022\016\n\nA"
"lterAlias\020n\022\024\n\017CreatePartition\020\310\001\022\022\n\rDro" "lterAlias\020n\022\023\n\017AlterCollection\020o\022\024\n\017Crea"
"pPartition\020\311\001\022\021\n\014HasPartition\020\312\001\022\026\n\021Desc" "tePartition\020\310\001\022\022\n\rDropPartition\020\311\001\022\021\n\014Ha"
"ribePartition\020\313\001\022\023\n\016ShowPartitions\020\314\001\022\023\n" "sPartition\020\312\001\022\026\n\021DescribePartition\020\313\001\022\023\n"
"\016LoadPartitions\020\315\001\022\026\n\021ReleasePartitions\020" "\016ShowPartitions\020\314\001\022\023\n\016LoadPartitions\020\315\001\022"
"\316\001\022\021\n\014ShowSegments\020\372\001\022\024\n\017DescribeSegment" "\026\n\021ReleasePartitions\020\316\001\022\021\n\014ShowSegments\020"
"\020\373\001\022\021\n\014LoadSegments\020\374\001\022\024\n\017ReleaseSegment" "\372\001\022\024\n\017DescribeSegment\020\373\001\022\021\n\014LoadSegments"
"s\020\375\001\022\024\n\017HandoffSegments\020\376\001\022\030\n\023LoadBalanc" "\020\374\001\022\024\n\017ReleaseSegments\020\375\001\022\024\n\017HandoffSegm"
"eSegments\020\377\001\022\025\n\020DescribeSegments\020\200\002\022\020\n\013C" "ents\020\376\001\022\030\n\023LoadBalanceSegments\020\377\001\022\025\n\020Des"
"reateIndex\020\254\002\022\022\n\rDescribeIndex\020\255\002\022\016\n\tDro" "cribeSegments\020\200\002\022\020\n\013CreateIndex\020\254\002\022\022\n\rDe"
"pIndex\020\256\002\022\013\n\006Insert\020\220\003\022\013\n\006Delete\020\221\003\022\n\n\005F" "scribeIndex\020\255\002\022\016\n\tDropIndex\020\256\002\022\013\n\006Insert"
"lush\020\222\003\022\027\n\022ResendSegmentStats\020\223\003\022\013\n\006Sear" "\020\220\003\022\013\n\006Delete\020\221\003\022\n\n\005Flush\020\222\003\022\027\n\022ResendSe"
"ch\020\364\003\022\021\n\014SearchResult\020\365\003\022\022\n\rGetIndexStat" "gmentStats\020\223\003\022\013\n\006Search\020\364\003\022\021\n\014SearchResu"
"e\020\366\003\022\032\n\025GetIndexBuildProgress\020\367\003\022\034\n\027GetC" "lt\020\365\003\022\022\n\rGetIndexState\020\366\003\022\032\n\025GetIndexBui"
"ollectionStatistics\020\370\003\022\033\n\026GetPartitionSt" "ldProgress\020\367\003\022\034\n\027GetCollectionStatistics"
"atistics\020\371\003\022\r\n\010Retrieve\020\372\003\022\023\n\016RetrieveRe" "\020\370\003\022\033\n\026GetPartitionStatistics\020\371\003\022\r\n\010Retr"
"sult\020\373\003\022\024\n\017WatchDmChannels\020\374\003\022\025\n\020RemoveD" "ieve\020\372\003\022\023\n\016RetrieveResult\020\373\003\022\024\n\017WatchDmC"
"mChannels\020\375\003\022\027\n\022WatchQueryChannels\020\376\003\022\030\n" "hannels\020\374\003\022\025\n\020RemoveDmChannels\020\375\003\022\027\n\022Wat"
"\023RemoveQueryChannels\020\377\003\022\035\n\030SealedSegment" "chQueryChannels\020\376\003\022\030\n\023RemoveQueryChannel"
"sChangeInfo\020\200\004\022\027\n\022WatchDeltaChannels\020\201\004\022" "s\020\377\003\022\035\n\030SealedSegmentsChangeInfo\020\200\004\022\027\n\022W"
"\024\n\017GetShardLeaders\020\202\004\022\020\n\013GetReplicas\020\203\004\022" "atchDeltaChannels\020\201\004\022\024\n\017GetShardLeaders\020"
"\023\n\016UnsubDmChannel\020\204\004\022\024\n\017GetDistribution\020" "\202\004\022\020\n\013GetReplicas\020\203\004\022\023\n\016UnsubDmChannel\020\204"
"\205\004\022\025\n\020SyncDistribution\020\206\004\022\020\n\013SegmentInfo" "\004\022\024\n\017GetDistribution\020\205\004\022\025\n\020SyncDistribut"
"\020\330\004\022\017\n\nSystemInfo\020\331\004\022\024\n\017GetRecoveryInfo\020" "ion\020\206\004\022\020\n\013SegmentInfo\020\330\004\022\017\n\nSystemInfo\020\331"
"\332\004\022\024\n\017GetSegmentState\020\333\004\022\r\n\010TimeTick\020\260\t\022" "\004\022\024\n\017GetRecoveryInfo\020\332\004\022\024\n\017GetSegmentSta"
"\023\n\016QueryNodeStats\020\261\t\022\016\n\tLoadIndex\020\262\t\022\016\n\t" "te\020\333\004\022\r\n\010TimeTick\020\260\t\022\023\n\016QueryNodeStats\020\261"
"RequestID\020\263\t\022\017\n\nRequestTSO\020\264\t\022\024\n\017Allocat" "\t\022\016\n\tLoadIndex\020\262\t\022\016\n\tRequestID\020\263\t\022\017\n\nReq"
"eSegment\020\265\t\022\026\n\021SegmentStatistics\020\266\t\022\025\n\020S" "uestTSO\020\264\t\022\024\n\017AllocateSegment\020\265\t\022\026\n\021Segm"
"egmentFlushDone\020\267\t\022\017\n\nDataNodeTt\020\270\t\022\025\n\020C" "entStatistics\020\266\t\022\025\n\020SegmentFlushDone\020\267\t\022"
"reateCredential\020\334\013\022\022\n\rGetCredential\020\335\013\022\025" "\017\n\nDataNodeTt\020\270\t\022\025\n\020CreateCredential\020\334\013\022"
"\n\020DeleteCredential\020\336\013\022\025\n\020UpdateCredentia" "\022\n\rGetCredential\020\335\013\022\025\n\020DeleteCredential\020"
"l\020\337\013\022\026\n\021ListCredUsernames\020\340\013\022\017\n\nCreateRo" "\336\013\022\025\n\020UpdateCredential\020\337\013\022\026\n\021ListCredUse"
"le\020\300\014\022\r\n\010DropRole\020\301\014\022\024\n\017OperateUserRole\020" "rnames\020\340\013\022\017\n\nCreateRole\020\300\014\022\r\n\010DropRole\020\301"
"\302\014\022\017\n\nSelectRole\020\303\014\022\017\n\nSelectUser\020\304\014\022\023\n\016" "\014\022\024\n\017OperateUserRole\020\302\014\022\017\n\nSelectRole\020\303\014"
"SelectResource\020\305\014\022\025\n\020OperatePrivilege\020\306\014" "\022\017\n\nSelectUser\020\304\014\022\023\n\016SelectResource\020\305\014\022\025"
"\022\020\n\013SelectGrant\020\307\014\022\033\n\026RefreshPolicyInfoC" "\n\020OperatePrivilege\020\306\014\022\020\n\013SelectGrant\020\307\014\022"
"ache\020\310\014\022\017\n\nListPolicy\020\311\014*\"\n\007DslType\022\007\n\003D" "\033\n\026RefreshPolicyInfoCache\020\310\014\022\017\n\nListPoli"
"sl\020\000\022\016\n\nBoolExprV1\020\001*B\n\017CompactionState\022" "cy\020\311\014*\"\n\007DslType\022\007\n\003Dsl\020\000\022\016\n\nBoolExprV1\020"
"\021\n\rUndefiedState\020\000\022\r\n\tExecuting\020\001\022\r\n\tCom" "\001*B\n\017CompactionState\022\021\n\rUndefiedState\020\000\022"
"pleted\020\002*X\n\020ConsistencyLevel\022\n\n\006Strong\020\000" "\r\n\tExecuting\020\001\022\r\n\tCompleted\020\002*X\n\020Consist"
"\022\013\n\007Session\020\001\022\013\n\007Bounded\020\002\022\016\n\nEventually" "encyLevel\022\n\n\006Strong\020\000\022\013\n\007Session\020\001\022\013\n\007Bo"
"\020\003\022\016\n\nCustomized\020\004*\213\001\n\013ImportState\022\021\n\rIm" "unded\020\002\022\016\n\nEventually\020\003\022\016\n\nCustomized\020\004*"
"portPending\020\000\022\020\n\014ImportFailed\020\001\022\021\n\rImpor" "\213\001\n\013ImportState\022\021\n\rImportPending\020\000\022\020\n\014Im"
"tStarted\020\002\022\023\n\017ImportPersisted\020\005\022\023\n\017Impor" "portFailed\020\001\022\021\n\rImportStarted\020\002\022\023\n\017Impor"
"tCompleted\020\006\022\032\n\026ImportFailedAndCleaned\020\007" "tPersisted\020\005\022\023\n\017ImportCompleted\020\006\022\032\n\026Imp"
"*2\n\nObjectType\022\016\n\nCollection\020\000\022\n\n\006Global" "ortFailedAndCleaned\020\007*2\n\nObjectType\022\016\n\nC"
"\020\001\022\010\n\004User\020\002*\206\005\n\017ObjectPrivilege\022\020\n\014Priv" "ollection\020\000\022\n\n\006Global\020\001\022\010\n\004User\020\002*\206\005\n\017Ob"
"ilegeAll\020\000\022\035\n\031PrivilegeCreateCollection\020" "jectPrivilege\022\020\n\014PrivilegeAll\020\000\022\035\n\031Privi"
"\001\022\033\n\027PrivilegeDropCollection\020\002\022\037\n\033Privil" "legeCreateCollection\020\001\022\033\n\027PrivilegeDropC"
"egeDescribeCollection\020\003\022\034\n\030PrivilegeShow" "ollection\020\002\022\037\n\033PrivilegeDescribeCollecti"
"Collections\020\004\022\021\n\rPrivilegeLoad\020\005\022\024\n\020Priv" "on\020\003\022\034\n\030PrivilegeShowCollections\020\004\022\021\n\rPr"
"ilegeRelease\020\006\022\027\n\023PrivilegeCompaction\020\007\022" "ivilegeLoad\020\005\022\024\n\020PrivilegeRelease\020\006\022\027\n\023P"
"\023\n\017PrivilegeInsert\020\010\022\023\n\017PrivilegeDelete\020" "rivilegeCompaction\020\007\022\023\n\017PrivilegeInsert\020"
"\t\022\032\n\026PrivilegeGetStatistics\020\n\022\030\n\024Privile" "\010\022\023\n\017PrivilegeDelete\020\t\022\032\n\026PrivilegeGetSt"
"geCreateIndex\020\013\022\030\n\024PrivilegeIndexDetail\020" "atistics\020\n\022\030\n\024PrivilegeCreateIndex\020\013\022\030\n\024"
"\014\022\026\n\022PrivilegeDropIndex\020\r\022\023\n\017PrivilegeSe" "PrivilegeIndexDetail\020\014\022\026\n\022PrivilegeDropI"
"arch\020\016\022\022\n\016PrivilegeFlush\020\017\022\022\n\016PrivilegeQ" "ndex\020\r\022\023\n\017PrivilegeSearch\020\016\022\022\n\016Privilege"
"uery\020\020\022\030\n\024PrivilegeLoadBalance\020\021\022\023\n\017Priv" "Flush\020\017\022\022\n\016PrivilegeQuery\020\020\022\030\n\024Privilege"
"ilegeImport\020\022\022\034\n\030PrivilegeCreateOwnershi" "LoadBalance\020\021\022\023\n\017PrivilegeImport\020\022\022\034\n\030Pr"
"p\020\023\022\027\n\023PrivilegeUpdateUser\020\024\022\032\n\026Privileg" "ivilegeCreateOwnership\020\023\022\027\n\023PrivilegeUpd"
"eDropOwnership\020\025\022\034\n\030PrivilegeSelectOwner" "ateUser\020\024\022\032\n\026PrivilegeDropOwnership\020\025\022\034\n"
"ship\020\026\022\034\n\030PrivilegeManageOwnership\020\027\022\027\n\023" "\030PrivilegeSelectOwnership\020\026\022\034\n\030Privilege"
"PrivilegeSelectUser\020\030*E\n\tStateCode\022\020\n\014In" "ManageOwnership\020\027\022\027\n\023PrivilegeSelectUser"
"itializing\020\000\022\013\n\007Healthy\020\001\022\014\n\010Abnormal\020\002\022" "\020\030*E\n\tStateCode\022\020\n\014Initializing\020\000\022\013\n\007Hea"
"\013\n\007StandBy\020\003:^\n\021privilege_ext_obj\022\037.goog" "lthy\020\001\022\014\n\010Abnormal\020\002\022\013\n\007StandBy\020\003:^\n\021pri"
"le.protobuf.MessageOptions\030\351\007 \001(\0132!.milv" "vilege_ext_obj\022\037.google.protobuf.Message"
"us.proto.common.PrivilegeExtBL\n\016io.milvu" "Options\030\351\007 \001(\0132!.milvus.proto.common.Pri"
"s.grpcB\013CommonProtoP\001Z(github.com/milvus" "vilegeExtBL\n\016io.milvus.grpcB\013CommonProto"
"-io/milvus/api/commonpb\240\001\001b\006proto3" "P\001Z(github.com/milvus-io/milvus/api/comm"
"onpb\240\001\001b\006proto3"
; ;
static const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable*const descriptor_table_common_2eproto_deps[1] = { static const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable*const descriptor_table_common_2eproto_deps[1] = {
&::descriptor_table_google_2fprotobuf_2fdescriptor_2eproto, &::descriptor_table_google_2fprotobuf_2fdescriptor_2eproto,
@ -494,7 +495,7 @@ static ::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase*const descriptor_table_com
static ::PROTOBUF_NAMESPACE_ID::internal::once_flag descriptor_table_common_2eproto_once; static ::PROTOBUF_NAMESPACE_ID::internal::once_flag descriptor_table_common_2eproto_once;
static bool descriptor_table_common_2eproto_initialized = false; static bool descriptor_table_common_2eproto_initialized = false;
const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable descriptor_table_common_2eproto = { const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable descriptor_table_common_2eproto = {
&descriptor_table_common_2eproto_initialized, descriptor_table_protodef_common_2eproto, "common.proto", 5354, &descriptor_table_common_2eproto_initialized, descriptor_table_protodef_common_2eproto, "common.proto", 5375,
&descriptor_table_common_2eproto_once, descriptor_table_common_2eproto_sccs, descriptor_table_common_2eproto_deps, 11, 1, &descriptor_table_common_2eproto_once, descriptor_table_common_2eproto_sccs, descriptor_table_common_2eproto_deps, 11, 1,
schemas, file_default_instances, TableStruct_common_2eproto::offsets, schemas, file_default_instances, TableStruct_common_2eproto::offsets,
file_level_metadata_common_2eproto, 11, file_level_enum_descriptors_common_2eproto, file_level_service_descriptors_common_2eproto, file_level_metadata_common_2eproto, 11, file_level_enum_descriptors_common_2eproto, file_level_service_descriptors_common_2eproto,
@ -640,6 +641,7 @@ bool MsgType_IsValid(int value) {
case 108: case 108:
case 109: case 109:
case 110: case 110:
case 111:
case 200: case 200:
case 201: case 201:
case 202: case 202:

View File

@ -286,6 +286,7 @@ enum MsgType : int {
CreateAlias = 108, CreateAlias = 108,
DropAlias = 109, DropAlias = 109,
AlterAlias = 110, AlterAlias = 110,
AlterCollection = 111,
CreatePartition = 200, CreatePartition = 200,
DropPartition = 201, DropPartition = 201,
HasPartition = 202, HasPartition = 202,

View File

@ -23,6 +23,8 @@ import (
"sync" "sync"
"time" "time"
"github.com/milvus-io/milvus/internal/util/tsoutil"
"github.com/milvus-io/milvus/api/commonpb" "github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/internal/log" "github.com/milvus-io/milvus/internal/log"
"github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/internal/proto/datapb"
@ -35,19 +37,20 @@ import (
) )
type compactTime struct { type compactTime struct {
travelTime Timestamp travelTime Timestamp
expireTime Timestamp expireTime Timestamp
collectionTTL time.Duration
} }
type trigger interface { type trigger interface {
start() start()
stop() stop()
// triggerCompaction triggers a compaction if any compaction condition satisfy. // triggerCompaction triggers a compaction if any compaction condition satisfy.
triggerCompaction(compactTime *compactTime) error triggerCompaction() error
// triggerSingleCompaction triggers a compaction bundled with collection-partition-channel-segment // triggerSingleCompaction triggers a compaction bundled with collection-partition-channel-segment
triggerSingleCompaction(collectionID, partitionID, segmentID int64, channel string, compactTime *compactTime) error triggerSingleCompaction(collectionID, partitionID, segmentID int64, channel string) error
// forceTriggerCompaction force to start a compaction // forceTriggerCompaction force to start a compaction
forceTriggerCompaction(collectionID int64, compactTime *compactTime) (UniqueID, error) forceTriggerCompaction(collectionID int64) (UniqueID, error)
} }
type compactionSignal struct { type compactionSignal struct {
@ -58,7 +61,6 @@ type compactionSignal struct {
partitionID UniqueID partitionID UniqueID
segmentID UniqueID segmentID UniqueID
channel string channel string
compactTime *compactTime
} }
var _ trigger = (*compactionTrigger)(nil) var _ trigger = (*compactionTrigger)(nil)
@ -140,15 +142,7 @@ func (t *compactionTrigger) startGlobalCompactionLoop() {
log.Info("global compaction loop exit") log.Info("global compaction loop exit")
return return
case <-t.globalTrigger.C: case <-t.globalTrigger.C:
cctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) err := t.triggerCompaction()
ct, err := GetCompactTime(cctx, t.allocator)
if err != nil {
log.Warn("unbale to get compaction time", zap.Error(err))
cancel()
continue
}
cancel()
err = t.triggerCompaction(ct)
if err != nil { if err != nil {
log.Warn("unable to triggerCompaction", zap.Error(err)) log.Warn("unable to triggerCompaction", zap.Error(err))
} }
@ -161,24 +155,61 @@ func (t *compactionTrigger) stop() {
t.wg.Wait() t.wg.Wait()
} }
func (t *compactionTrigger) allocTs() (Timestamp, error) {
cctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
ts, err := t.allocator.allocTimestamp(cctx)
if err != nil {
return 0, err
}
return ts, nil
}
func (t *compactionTrigger) getCompactTime(ts Timestamp, collectionID UniqueID) (*compactTime, error) {
coll := t.meta.GetCollection(collectionID)
if coll == nil {
return nil, fmt.Errorf("collection ID %d not found", collectionID)
}
collectionTTL, err := getCollectionTTL(coll.Properties)
if err != nil {
return nil, err
}
pts, _ := tsoutil.ParseTS(ts)
ttRetention := pts.Add(-time.Duration(Params.CommonCfg.RetentionDuration) * time.Second)
ttRetentionLogic := tsoutil.ComposeTS(ttRetention.UnixNano()/int64(time.Millisecond), 0)
if collectionTTL > 0 {
ttexpired := pts.Add(-collectionTTL)
ttexpiredLogic := tsoutil.ComposeTS(ttexpired.UnixNano()/int64(time.Millisecond), 0)
return &compactTime{ttRetentionLogic, ttexpiredLogic, collectionTTL}, nil
}
// no expiration time
return &compactTime{ttRetentionLogic, 0, 0}, nil
}
// triggerCompaction trigger a compaction if any compaction condition satisfy. // triggerCompaction trigger a compaction if any compaction condition satisfy.
func (t *compactionTrigger) triggerCompaction(compactTime *compactTime) error { func (t *compactionTrigger) triggerCompaction() error {
id, err := t.allocSignalID() id, err := t.allocSignalID()
if err != nil { if err != nil {
return err return err
} }
signal := &compactionSignal{ signal := &compactionSignal{
id: id, id: id,
isForce: false, isForce: false,
isGlobal: true, isGlobal: true,
compactTime: compactTime,
} }
t.signals <- signal t.signals <- signal
return nil return nil
} }
// triggerSingleCompaction triger a compaction bundled with collection-partiiton-channel-segment // triggerSingleCompaction triger a compaction bundled with collection-partiiton-channel-segment
func (t *compactionTrigger) triggerSingleCompaction(collectionID, partitionID, segmentID int64, channel string, compactTime *compactTime) error { func (t *compactionTrigger) triggerSingleCompaction(collectionID, partitionID, segmentID int64, channel string) error {
// If AutoCompaction diabled, flush request will not trigger compaction // If AutoCompaction diabled, flush request will not trigger compaction
if !Params.DataCoordCfg.GetEnableAutoCompaction() { if !Params.DataCoordCfg.GetEnableAutoCompaction() {
return nil return nil
@ -196,7 +227,6 @@ func (t *compactionTrigger) triggerSingleCompaction(collectionID, partitionID, s
partitionID: partitionID, partitionID: partitionID,
segmentID: segmentID, segmentID: segmentID,
channel: channel, channel: channel,
compactTime: compactTime,
} }
t.signals <- signal t.signals <- signal
return nil return nil
@ -204,7 +234,7 @@ func (t *compactionTrigger) triggerSingleCompaction(collectionID, partitionID, s
// forceTriggerCompaction force to start a compaction // forceTriggerCompaction force to start a compaction
// invoked by user `ManualCompaction` operation // invoked by user `ManualCompaction` operation
func (t *compactionTrigger) forceTriggerCompaction(collectionID int64, compactTime *compactTime) (UniqueID, error) { func (t *compactionTrigger) forceTriggerCompaction(collectionID int64) (UniqueID, error) {
id, err := t.allocSignalID() id, err := t.allocSignalID()
if err != nil { if err != nil {
return -1, err return -1, err
@ -214,7 +244,6 @@ func (t *compactionTrigger) forceTriggerCompaction(collectionID int64, compactTi
isForce: true, isForce: true,
isGlobal: true, isGlobal: true,
collectionID: collectionID, collectionID: collectionID,
compactTime: compactTime,
} }
t.handleGlobalSignal(signal) t.handleGlobalSignal(signal)
return id, nil return id, nil
@ -226,14 +255,6 @@ func (t *compactionTrigger) allocSignalID() (UniqueID, error) {
return t.allocator.allocID(ctx) return t.allocator.allocID(ctx)
} }
func getPlanIDs(plans []*datapb.CompactionPlan) []int64 {
ids := make([]int64, 0, len(plans))
for _, p := range plans {
ids = append(ids, p.GetPlanID())
}
return ids
}
func (t *compactionTrigger) estimateDiskSegmentMaxNumOfRows(collectionID UniqueID) (int, error) { func (t *compactionTrigger) estimateDiskSegmentMaxNumOfRows(collectionID UniqueID) (int, error) {
collMeta := t.meta.GetCollection(collectionID) collMeta := t.meta.GetCollection(collectionID)
if collMeta == nil { if collMeta == nil {
@ -288,6 +309,19 @@ func (t *compactionTrigger) handleGlobalSignal(signal *compactionSignal) {
!segment.GetIsImporting() // not importing now !segment.GetIsImporting() // not importing now
}) // m is list of chanPartSegments, which is channel-partition organized segments }) // m is list of chanPartSegments, which is channel-partition organized segments
if len(m) == 0 {
return
}
ts, err := t.allocTs()
if err != nil {
log.Warn("allocate ts failed, skip to handle compaction",
zap.Int64("collectionID", signal.collectionID),
zap.Int64("partitionID", signal.partitionID),
zap.Int64("segmentID", signal.segmentID))
return
}
for _, group := range m { for _, group := range m {
if !signal.isForce && t.compactionHandler.isFull() { if !signal.isForce && t.compactionHandler.isFull() {
break break
@ -301,7 +335,16 @@ func (t *compactionTrigger) handleGlobalSignal(signal *compactionSignal) {
continue continue
} }
plans := t.generatePlans(group.segments, signal.isForce, signal.compactTime) ct, err := t.getCompactTime(ts, group.collectionID)
if err != nil {
log.Warn("get compact time failed, skip to handle compaction",
zap.Int64("collectionID", group.collectionID),
zap.Int64("partitionID", group.partitionID),
zap.String("channel", group.channelName))
return
}
plans := t.generatePlans(group.segments, signal.isForce, ct)
for _, plan := range plans { for _, plan := range plans {
if !signal.isForce && t.compactionHandler.isFull() { if !signal.isForce && t.compactionHandler.isFull() {
log.Warn("compaction plan skipped due to handler full", zap.Int64("collection", signal.collectionID), zap.Int64("planID", plan.PlanID)) log.Warn("compaction plan skipped due to handler full", zap.Int64("collection", signal.collectionID), zap.Int64("planID", plan.PlanID))
@ -349,12 +392,30 @@ func (t *compactionTrigger) handleSignal(signal *compactionSignal) {
partitionID := segment.GetPartitionID() partitionID := segment.GetPartitionID()
segments := t.getCandidateSegments(channel, partitionID) segments := t.getCandidateSegments(channel, partitionID)
if len(segments) == 0 {
return
}
err := t.updateSegmentMaxSize(segments) err := t.updateSegmentMaxSize(segments)
if err != nil { if err != nil {
log.Warn("failed to update segment max size", zap.Error(err)) log.Warn("failed to update segment max size", zap.Error(err))
} }
plans := t.generatePlans(segments, signal.isForce, signal.compactTime) ts, err := t.allocTs()
if err != nil {
log.Warn("allocate ts failed, skip to handle compaction", zap.Int64("collectionID", signal.collectionID),
zap.Int64("partitionID", signal.partitionID), zap.Int64("segmentID", signal.segmentID))
return
}
ct, err := t.getCompactTime(ts, segment.GetCollectionID())
if err != nil {
log.Warn("get compact time failed, skip to handle compaction", zap.Int64("collectionID", segment.GetCollectionID()),
zap.Int64("partitionID", partitionID), zap.String("channel", channel))
return
}
plans := t.generatePlans(segments, signal.isForce, ct)
for _, plan := range plans { for _, plan := range plans {
if t.compactionHandler.isFull() { if t.compactionHandler.isFull() {
log.Warn("compaction plan skipped due to handler full", zap.Int64("collection", signal.collectionID), zap.Int64("planID", plan.PlanID)) log.Warn("compaction plan skipped due to handler full", zap.Int64("collection", signal.collectionID), zap.Int64("planID", plan.PlanID))
@ -478,9 +539,10 @@ func (t *compactionTrigger) generatePlans(segments []*SegmentInfo, force bool, c
func segmentsToPlan(segments []*SegmentInfo, compactTime *compactTime) *datapb.CompactionPlan { func segmentsToPlan(segments []*SegmentInfo, compactTime *compactTime) *datapb.CompactionPlan {
plan := &datapb.CompactionPlan{ plan := &datapb.CompactionPlan{
Timetravel: compactTime.travelTime, Timetravel: compactTime.travelTime,
Type: datapb.CompactionType_MixCompaction, Type: datapb.CompactionType_MixCompaction,
Channel: segments[0].GetInsertChannel(), Channel: segments[0].GetInsertChannel(),
CollectionTtl: compactTime.collectionTTL.Nanoseconds(),
} }
for _, s := range segments { for _, s := range segments {

View File

@ -22,6 +22,10 @@ import (
"testing" "testing"
"time" "time"
"github.com/milvus-io/milvus/internal/common"
"github.com/milvus-io/milvus/internal/util/tsoutil"
"github.com/milvus-io/milvus/api/commonpb" "github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/api/schemapb" "github.com/milvus-io/milvus/api/schemapb"
"github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/internal/proto/datapb"
@ -79,18 +83,21 @@ func Test_compactionTrigger_force(t *testing.T) {
globalTrigger *time.Ticker globalTrigger *time.Ticker
segRefer *SegmentReferenceManager segRefer *SegmentReferenceManager
} }
type args struct {
collectionID int64
compactTime *compactTime
}
Params.Init() Params.Init()
Params.CommonCfg.RetentionDuration = 200
pts, _ := tsoutil.ParseTS(0)
ttRetention := pts.Add(-time.Duration(Params.CommonCfg.RetentionDuration) * time.Second)
timeTravel := tsoutil.ComposeTS(ttRetention.UnixNano()/int64(time.Millisecond), 0)
vecFieldID := int64(201) vecFieldID := int64(201)
tests := []struct { tests := []struct {
name string name string
fields fields fields fields
args args collectionID UniqueID
wantErr bool wantErr bool
wantPlans []*datapb.CompactionPlan wantPlans []*datapb.CompactionPlan
}{ }{
{ {
"test force compaction", "test force compaction",
@ -150,9 +157,21 @@ func Test_compactionTrigger_force(t *testing.T) {
}, },
}, },
}, },
3: {
SegmentInfo: &datapb.SegmentInfo{
ID: 3,
CollectionID: 1111,
PartitionID: 1,
LastExpireTime: 100,
NumOfRows: 100,
MaxRowNum: 300,
InsertChannel: "ch1",
State: commonpb.SegmentState_Flushed,
},
},
}, },
}, },
collections: map[int64]*datapb.CollectionInfo{ collections: map[int64]*collectionInfo{
2: { 2: {
ID: 2, ID: 2,
Schema: &schemapb.CollectionSchema{ Schema: &schemapb.CollectionSchema{
@ -163,6 +182,29 @@ func Test_compactionTrigger_force(t *testing.T) {
}, },
}, },
}, },
Properties: map[string]string{
common.CollectionTTLConfigKey: "0",
},
},
1111: {
ID: 1111,
Schema: &schemapb.CollectionSchema{
Fields: []*schemapb.FieldSchema{
{
FieldID: vecFieldID,
DataType: schemapb.DataType_FloatVector,
TypeParams: []*commonpb.KeyValuePair{
{
Key: "dim",
Value: "128",
},
},
},
},
},
Properties: map[string]string{
common.CollectionTTLConfigKey: "error",
},
}, },
1000: { 1000: {
ID: 1000, ID: 1000,
@ -239,20 +281,17 @@ func Test_compactionTrigger_force(t *testing.T) {
}, },
}, },
}, },
newMockAllocator(), &MockAllocator0{},
nil, nil,
&spyCompactionHandler{spyChan: make(chan *datapb.CompactionPlan, 1)}, &spyCompactionHandler{spyChan: make(chan *datapb.CompactionPlan, 1)},
nil, nil,
&SegmentReferenceManager{segmentsLock: map[UniqueID]map[UniqueID]*datapb.SegmentReferenceLock{}}, &SegmentReferenceManager{segmentsLock: map[UniqueID]map[UniqueID]*datapb.SegmentReferenceLock{}},
}, },
args{ 2,
2,
&compactTime{travelTime: 200, expireTime: 0},
},
false, false,
[]*datapb.CompactionPlan{ []*datapb.CompactionPlan{
{ {
PlanID: 2, PlanID: 0,
SegmentBinlogs: []*datapb.CompactionSegmentBinlogs{ SegmentBinlogs: []*datapb.CompactionSegmentBinlogs{
{ {
SegmentID: 1, SegmentID: 1,
@ -294,7 +333,7 @@ func Test_compactionTrigger_force(t *testing.T) {
StartTime: 0, StartTime: 0,
TimeoutInSeconds: Params.DataCoordCfg.CompactionTimeoutInSeconds, TimeoutInSeconds: Params.DataCoordCfg.CompactionTimeoutInSeconds,
Type: datapb.CompactionType_MixCompaction, Type: datapb.CompactionType_MixCompaction,
Timetravel: 200, Timetravel: timeTravel,
Channel: "ch1", Channel: "ch1",
}, },
}, },
@ -313,7 +352,7 @@ func Test_compactionTrigger_force(t *testing.T) {
segRefer: tt.fields.segRefer, segRefer: tt.fields.segRefer,
indexCoord: indexCoord, indexCoord: indexCoord,
} }
_, err := tr.forceTriggerCompaction(tt.args.collectionID, tt.args.compactTime) _, err := tr.forceTriggerCompaction(tt.collectionID)
assert.Equal(t, tt.wantErr, err != nil) assert.Equal(t, tt.wantErr, err != nil)
spy := (tt.fields.compactionHandler).(*spyCompactionHandler) spy := (tt.fields.compactionHandler).(*spyCompactionHandler)
plan := <-spy.spyChan plan := <-spy.spyChan
@ -339,8 +378,8 @@ func Test_compactionTrigger_force(t *testing.T) {
indexCoord: indexCood, indexCoord: indexCood,
estimateDiskSegmentPolicy: calBySchemaPolicyWithDiskIndex, estimateDiskSegmentPolicy: calBySchemaPolicyWithDiskIndex,
} }
tt.args.collectionID = 1000 tt.collectionID = 1000
_, err := tr.forceTriggerCompaction(tt.args.collectionID, tt.args.compactTime) _, err := tr.forceTriggerCompaction(tt.collectionID)
assert.Equal(t, tt.wantErr, err != nil) assert.Equal(t, tt.wantErr, err != nil)
// expect max row num = 2048*1024*1024/(128*4) = 4194304 // expect max row num = 2048*1024*1024/(128*4) = 4194304
assert.EqualValues(t, 4194304, tt.fields.meta.segments.GetSegments()[0].MaxRowNum) assert.EqualValues(t, 4194304, tt.fields.meta.segments.GetSegments()[0].MaxRowNum)
@ -366,8 +405,8 @@ func Test_compactionTrigger_force(t *testing.T) {
indexCoord: indexCood, indexCoord: indexCood,
estimateDiskSegmentPolicy: calBySchemaPolicyWithDiskIndex, estimateDiskSegmentPolicy: calBySchemaPolicyWithDiskIndex,
} }
tt.args.collectionID = 2000 tt.collectionID = 2000
_, err := tr.forceTriggerCompaction(tt.args.collectionID, tt.args.compactTime) _, err := tr.forceTriggerCompaction(tt.collectionID)
assert.Equal(t, tt.wantErr, err != nil) assert.Equal(t, tt.wantErr, err != nil)
spy := (tt.fields.compactionHandler).(*spyCompactionHandler) spy := (tt.fields.compactionHandler).(*spyCompactionHandler)
hasPlan := true hasPlan := true
@ -398,8 +437,8 @@ func Test_compactionTrigger_force(t *testing.T) {
indexCoord: indexCood, indexCoord: indexCood,
estimateDiskSegmentPolicy: calBySchemaPolicyWithDiskIndex, estimateDiskSegmentPolicy: calBySchemaPolicyWithDiskIndex,
} }
tt.args.collectionID = 3000 tt.collectionID = 3000
_, err := tr.forceTriggerCompaction(tt.args.collectionID, tt.args.compactTime) _, err := tr.forceTriggerCompaction(tt.collectionID)
assert.Equal(t, tt.wantErr, err != nil) assert.Equal(t, tt.wantErr, err != nil)
spy := (tt.fields.compactionHandler).(*spyCompactionHandler) spy := (tt.fields.compactionHandler).(*spyCompactionHandler)
hasPlan := true hasPlan := true
@ -430,8 +469,8 @@ func Test_compactionTrigger_force(t *testing.T) {
indexCoord: indexCood, indexCoord: indexCood,
estimateDiskSegmentPolicy: calBySchemaPolicyWithDiskIndex, estimateDiskSegmentPolicy: calBySchemaPolicyWithDiskIndex,
} }
tt.args.collectionID = 4000 tt.collectionID = 4000
_, err := tr.forceTriggerCompaction(tt.args.collectionID, tt.args.compactTime) _, err := tr.forceTriggerCompaction(tt.collectionID)
assert.Equal(t, tt.wantErr, err != nil) assert.Equal(t, tt.wantErr, err != nil)
spy := (tt.fields.compactionHandler).(*spyCompactionHandler) spy := (tt.fields.compactionHandler).(*spyCompactionHandler)
hasPlan := true hasPlan := true
@ -461,8 +500,8 @@ func Test_compactionTrigger_force(t *testing.T) {
segRefer: tt.fields.segRefer, segRefer: tt.fields.segRefer,
indexCoord: indexCood, indexCoord: indexCood,
} }
tt.args.collectionID = 10000 tt.collectionID = 10000
_, err := tr.forceTriggerCompaction(tt.args.collectionID, tt.args.compactTime) _, err := tr.forceTriggerCompaction(tt.collectionID)
assert.Equal(t, tt.wantErr, err != nil) assert.Equal(t, tt.wantErr, err != nil)
spy := (tt.fields.compactionHandler).(*spyCompactionHandler) spy := (tt.fields.compactionHandler).(*spyCompactionHandler)
hasPlan := true hasPlan := true
@ -476,6 +515,120 @@ func Test_compactionTrigger_force(t *testing.T) {
assert.Equal(t, false, hasPlan) assert.Equal(t, false, hasPlan)
}) })
t.Run(tt.name+" with allocate ts error", func(t *testing.T) {
indexCood := newMockIndexCoord()
tr := &compactionTrigger{
meta: tt.fields.meta,
allocator: &FailsAllocator{allocIDSucceed: true},
signals: tt.fields.signals,
compactionHandler: tt.fields.compactionHandler,
globalTrigger: tt.fields.globalTrigger,
segRefer: tt.fields.segRefer,
indexCoord: indexCood,
estimateDiskSegmentPolicy: calBySchemaPolicyWithDiskIndex,
}
{
// test alloc ts fail for handle global signal
signal := &compactionSignal{
id: 0,
isForce: true,
isGlobal: true,
collectionID: tt.collectionID,
}
tr.handleGlobalSignal(signal)
spy := (tt.fields.compactionHandler).(*spyCompactionHandler)
hasPlan := true
select {
case <-spy.spyChan:
hasPlan = true
case <-time.After(2 * time.Second):
hasPlan = false
}
assert.Equal(t, false, hasPlan)
}
{
// test alloc ts fail for handle signal
signal := &compactionSignal{
id: 0,
isForce: true,
collectionID: tt.collectionID,
segmentID: 3,
}
tr.handleSignal(signal)
spy := (tt.fields.compactionHandler).(*spyCompactionHandler)
hasPlan := true
select {
case <-spy.spyChan:
hasPlan = true
case <-time.After(2 * time.Second):
hasPlan = false
}
assert.Equal(t, false, hasPlan)
}
})
t.Run(tt.name+" with getCompact error", func(t *testing.T) {
indexCood := newMockIndexCoord()
for _, segment := range tt.fields.meta.segments.GetSegments() {
segment.CollectionID = 1111
}
tr := &compactionTrigger{
meta: tt.fields.meta,
allocator: tt.fields.allocator,
signals: tt.fields.signals,
compactionHandler: tt.fields.compactionHandler,
globalTrigger: tt.fields.globalTrigger,
segRefer: tt.fields.segRefer,
indexCoord: indexCood,
estimateDiskSegmentPolicy: calBySchemaPolicyWithDiskIndex,
}
{
// test getCompactTime fail for handle global signal
signal := &compactionSignal{
id: 0,
isForce: true,
isGlobal: true,
collectionID: 1111,
}
tr.handleGlobalSignal(signal)
spy := (tt.fields.compactionHandler).(*spyCompactionHandler)
hasPlan := true
select {
case <-spy.spyChan:
hasPlan = true
case <-time.After(2 * time.Second):
hasPlan = false
}
assert.Equal(t, false, hasPlan)
}
{
// test getCompactTime fail for handle signal
signal := &compactionSignal{
id: 0,
isForce: true,
collectionID: 1111,
segmentID: 3,
}
tr.handleSignal(signal)
spy := (tt.fields.compactionHandler).(*spyCompactionHandler)
hasPlan := true
select {
case <-spy.spyChan:
hasPlan = true
case <-time.After(2 * time.Second):
hasPlan = false
}
assert.Equal(t, false, hasPlan)
}
})
} }
} }
@ -539,7 +692,7 @@ func Test_compactionTrigger_force_maxSegmentLimit(t *testing.T) {
fields{ fields{
&meta{ &meta{
segments: segmentInfos, segments: segmentInfos,
collections: map[int64]*datapb.CollectionInfo{ collections: map[int64]*collectionInfo{
2: { 2: {
ID: 2, ID: 2,
Schema: &schemapb.CollectionSchema{ Schema: &schemapb.CollectionSchema{
@ -626,7 +779,7 @@ func Test_compactionTrigger_force_maxSegmentLimit(t *testing.T) {
segRefer: &SegmentReferenceManager{segmentsLock: map[UniqueID]map[UniqueID]*datapb.SegmentReferenceLock{}}, segRefer: &SegmentReferenceManager{segmentsLock: map[UniqueID]map[UniqueID]*datapb.SegmentReferenceLock{}},
indexCoord: indexCoord, indexCoord: indexCoord,
} }
_, err := tr.forceTriggerCompaction(tt.args.collectionID, tt.args.compactTime) _, err := tr.forceTriggerCompaction(tt.args.collectionID)
assert.Equal(t, tt.wantErr, err != nil) assert.Equal(t, tt.wantErr, err != nil)
spy := (tt.fields.compactionHandler).(*spyCompactionHandler) spy := (tt.fields.compactionHandler).(*spyCompactionHandler)
@ -760,7 +913,7 @@ func Test_compactionTrigger_noplan(t *testing.T) {
}, },
}, },
}, },
collections: map[int64]*datapb.CollectionInfo{ collections: map[int64]*collectionInfo{
2: { 2: {
ID: 2, ID: 2,
Schema: &schemapb.CollectionSchema{ Schema: &schemapb.CollectionSchema{
@ -802,7 +955,7 @@ func Test_compactionTrigger_noplan(t *testing.T) {
} }
tr.start() tr.start()
defer tr.stop() defer tr.stop()
err := tr.triggerCompaction(tt.args.compactTime) err := tr.triggerCompaction()
assert.Equal(t, tt.wantErr, err != nil) assert.Equal(t, tt.wantErr, err != nil)
spy := (tt.fields.compactionHandler).(*spyCompactionHandler) spy := (tt.fields.compactionHandler).(*spyCompactionHandler)
select { select {
@ -951,7 +1104,7 @@ func Test_compactionTrigger_smallfiles(t *testing.T) {
}, },
}, },
}, },
collections: map[int64]*datapb.CollectionInfo{ collections: map[int64]*collectionInfo{
2: { 2: {
ID: 2, ID: 2,
Schema: &schemapb.CollectionSchema{ Schema: &schemapb.CollectionSchema{
@ -993,7 +1146,7 @@ func Test_compactionTrigger_smallfiles(t *testing.T) {
} }
tr.start() tr.start()
defer tr.stop() defer tr.stop()
err := tr.triggerCompaction(tt.args.compactTime) err := tr.triggerCompaction()
assert.Equal(t, tt.wantErr, err != nil) assert.Equal(t, tt.wantErr, err != nil)
spy := (tt.fields.compactionHandler).(*spyCompactionHandler) spy := (tt.fields.compactionHandler).(*spyCompactionHandler)
select { select {
@ -1072,7 +1225,7 @@ func Test_compactionTrigger_noplan_random_size(t *testing.T) {
fields{ fields{
&meta{ &meta{
segments: segmentInfos, segments: segmentInfos,
collections: map[int64]*datapb.CollectionInfo{ collections: map[int64]*collectionInfo{
2: { 2: {
ID: 2, ID: 2,
Schema: &schemapb.CollectionSchema{ Schema: &schemapb.CollectionSchema{
@ -1114,7 +1267,7 @@ func Test_compactionTrigger_noplan_random_size(t *testing.T) {
} }
tr.start() tr.start()
defer tr.stop() defer tr.stop()
err := tr.triggerCompaction(tt.args.compactTime) err := tr.triggerCompaction()
assert.Equal(t, tt.wantErr, err != nil) assert.Equal(t, tt.wantErr, err != nil)
spy := (tt.fields.compactionHandler).(*spyCompactionHandler) spy := (tt.fields.compactionHandler).(*spyCompactionHandler)
@ -1309,3 +1462,47 @@ func Test_handleSignal(t *testing.T) {
got.handleSignal(signal) got.handleSignal(signal)
}) })
} }
func Test_allocTs(t *testing.T) {
got := newCompactionTrigger(&meta{segments: NewSegmentsInfo()}, &compactionPlanHandler{}, newMockAllocator(),
&SegmentReferenceManager{segmentsLock: map[UniqueID]map[UniqueID]*datapb.SegmentReferenceLock{}}, nil)
ts, err := got.allocTs()
assert.NoError(t, err)
assert.True(t, ts > 0)
got = newCompactionTrigger(&meta{segments: NewSegmentsInfo()}, &compactionPlanHandler{}, &FailsAllocator{},
&SegmentReferenceManager{segmentsLock: map[UniqueID]map[UniqueID]*datapb.SegmentReferenceLock{}}, nil)
ts, err = got.allocTs()
assert.Error(t, err)
assert.Equal(t, uint64(0), ts)
}
func Test_getCompactTime(t *testing.T) {
collections := map[UniqueID]*collectionInfo{
1: {
ID: 1,
Schema: newTestSchema(),
Partitions: []UniqueID{1},
Properties: map[string]string{
common.CollectionTTLConfigKey: "10",
},
},
2: {
ID: 2,
Schema: newTestSchema(),
Partitions: []UniqueID{1},
Properties: map[string]string{
common.CollectionTTLConfigKey: "error",
},
},
}
m := &meta{segments: NewSegmentsInfo(), collections: collections}
got := newCompactionTrigger(m, &compactionPlanHandler{}, newMockAllocator(),
&SegmentReferenceManager{segmentsLock: map[UniqueID]map[UniqueID]*datapb.SegmentReferenceLock{}}, nil)
now := tsoutil.GetCurrentTime()
ct, err := got.getCompactTime(now, 1)
assert.NoError(t, err)
assert.NotNil(t, ct)
}

View File

@ -150,8 +150,8 @@ func (h *ServerHandler) GetVChanPositions(channel *channel, partitionID UniqueID
} }
} }
func getCollectionStartPosition(channel string, collectionInfo *datapb.CollectionInfo) *internalpb.MsgPosition { func getCollectionStartPosition(channel string, collectionInfo *collectionInfo) *internalpb.MsgPosition {
return toMsgPosition(channel, collectionInfo.GetStartPositions()) return toMsgPosition(channel, collectionInfo.StartPositions)
} }
func toMsgPosition(channel string, startPositions []*commonpb.KeyDataPair) *internalpb.MsgPosition { func toMsgPosition(channel string, startPositions []*commonpb.KeyDataPair) *internalpb.MsgPosition {
@ -184,7 +184,7 @@ func trimSegmentInfo(info *datapb.SegmentInfo) *datapb.SegmentInfo {
} }
// GetCollection returns collection info with specified collection id // GetCollection returns collection info with specified collection id
func (h *ServerHandler) GetCollection(ctx context.Context, collectionID UniqueID) *datapb.CollectionInfo { func (h *ServerHandler) GetCollection(ctx context.Context, collectionID UniqueID) *collectionInfo {
coll := h.s.meta.GetCollection(collectionID) coll := h.s.meta.GetCollection(collectionID)
if coll != nil { if coll != nil {
return coll return coll

View File

@ -23,9 +23,15 @@ import (
"sync" "sync"
"time" "time"
"golang.org/x/exp/maps"
"github.com/golang/protobuf/proto"
"github.com/milvus-io/milvus/internal/common"
"go.uber.org/zap" "go.uber.org/zap"
"github.com/milvus-io/milvus/api/commonpb" "github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/api/schemapb"
"github.com/milvus-io/milvus/internal/kv" "github.com/milvus-io/milvus/internal/kv"
"github.com/milvus-io/milvus/internal/log" "github.com/milvus-io/milvus/internal/log"
"github.com/milvus-io/milvus/internal/metastore" "github.com/milvus-io/milvus/internal/metastore"
@ -39,8 +45,16 @@ type meta struct {
sync.RWMutex sync.RWMutex
ctx context.Context ctx context.Context
catalog metastore.DataCoordCatalog catalog metastore.DataCoordCatalog
collections map[UniqueID]*datapb.CollectionInfo // collection id to collection info collections map[UniqueID]*collectionInfo // collection id to collection info
segments *SegmentsInfo // segment id to segment info segments *SegmentsInfo // segment id to segment info
}
type collectionInfo struct {
ID int64
Schema *schemapb.CollectionSchema
Partitions []int64
StartPositions []*commonpb.KeyDataPair
Properties map[string]string
} }
// NewMeta creates meta from provided `kv.TxnKV` // NewMeta creates meta from provided `kv.TxnKV`
@ -48,7 +62,7 @@ func newMeta(ctx context.Context, kv kv.TxnKV, chunkManagerRootPath string) (*me
mt := &meta{ mt := &meta{
ctx: ctx, ctx: ctx,
catalog: &datacoord.Catalog{Txn: kv, ChunkManagerRootPath: chunkManagerRootPath}, catalog: &datacoord.Catalog{Txn: kv, ChunkManagerRootPath: chunkManagerRootPath},
collections: make(map[UniqueID]*datapb.CollectionInfo), collections: make(map[UniqueID]*collectionInfo),
segments: NewSegmentsInfo(), segments: NewSegmentsInfo(),
} }
err := mt.reloadFromKV() err := mt.reloadFromKV()
@ -86,19 +100,19 @@ func (m *meta) reloadFromKV() error {
// AddCollection adds a collection into meta // AddCollection adds a collection into meta
// Note that collection info is just for caching and will not be set into etcd from datacoord // Note that collection info is just for caching and will not be set into etcd from datacoord
func (m *meta) AddCollection(collection *datapb.CollectionInfo) { func (m *meta) AddCollection(collection *collectionInfo) {
log.Info("meta update: add collection", log.Debug("meta update: add collection",
zap.Int64("collection ID", collection.GetID())) zap.Int64("collection ID", collection.ID))
m.Lock() m.Lock()
defer m.Unlock() defer m.Unlock()
m.collections[collection.ID] = collection m.collections[collection.ID] = collection
metrics.DataCoordNumCollections.WithLabelValues().Set(float64(len(m.collections))) metrics.DataCoordNumCollections.WithLabelValues().Set(float64(len(m.collections)))
log.Info("meta update: add collection - complete", log.Debug("meta update: add collection - complete",
zap.Int64("collection ID", collection.GetID())) zap.Int64("collection ID", collection.ID))
} }
// GetCollection returns collection info with provided collection id from local cache // GetCollection returns collection info with provided collection id from local cache
func (m *meta) GetCollection(collectionID UniqueID) *datapb.CollectionInfo { func (m *meta) GetCollection(collectionID UniqueID) *collectionInfo {
m.RLock() m.RLock()
defer m.RUnlock() defer m.RUnlock()
collection, ok := m.collections[collectionID] collection, ok := m.collections[collectionID]
@ -108,6 +122,28 @@ func (m *meta) GetCollection(collectionID UniqueID) *datapb.CollectionInfo {
return collection return collection
} }
func (m *meta) GetClonedCollectionInfo(collectionID UniqueID) *collectionInfo {
m.RLock()
defer m.RUnlock()
coll, ok := m.collections[collectionID]
if !ok {
return nil
}
clonedProperties := make(map[string]string)
maps.Copy(clonedProperties, coll.Properties)
cloneColl := &collectionInfo{
ID: coll.ID,
Schema: proto.Clone(coll.Schema).(*schemapb.CollectionSchema),
Partitions: coll.Partitions,
StartPositions: common.CloneKeyDataPairs(coll.StartPositions),
Properties: clonedProperties,
}
return cloneColl
}
// chanPartSegments is an internal result struct, which is aggregates of SegmentInfos with same collectionID, partitionID and channelName // chanPartSegments is an internal result struct, which is aggregates of SegmentInfos with same collectionID, partitionID and channelName
type chanPartSegments struct { type chanPartSegments struct {
collectionID UniqueID collectionID UniqueID
@ -162,7 +198,7 @@ func (m *meta) GetNumRowsOfCollection(collectionID UniqueID) int64 {
// AddSegment records segment info, persisting info into kv store // AddSegment records segment info, persisting info into kv store
func (m *meta) AddSegment(segment *SegmentInfo) error { func (m *meta) AddSegment(segment *SegmentInfo) error {
log.Info("meta update: adding segment", log.Debug("meta update: adding segment",
zap.Int64("segment ID", segment.GetID())) zap.Int64("segment ID", segment.GetID()))
m.Lock() m.Lock()
defer m.Unlock() defer m.Unlock()
@ -174,14 +210,14 @@ func (m *meta) AddSegment(segment *SegmentInfo) error {
} }
m.segments.SetSegment(segment.GetID(), segment) m.segments.SetSegment(segment.GetID(), segment)
metrics.DataCoordNumSegments.WithLabelValues(segment.GetState().String()).Inc() metrics.DataCoordNumSegments.WithLabelValues(segment.GetState().String()).Inc()
log.Info("meta update: adding segment - complete", log.Debug("meta update: adding segment - complete",
zap.Int64("segment ID", segment.GetID())) zap.Int64("segment ID", segment.GetID()))
return nil return nil
} }
// DropSegment remove segment with provided id, etcd persistence also removed // DropSegment remove segment with provided id, etcd persistence also removed
func (m *meta) DropSegment(segmentID UniqueID) error { func (m *meta) DropSegment(segmentID UniqueID) error {
log.Info("meta update: dropping segment", log.Debug("meta update: dropping segment",
zap.Int64("segment ID", segmentID)) zap.Int64("segment ID", segmentID))
m.Lock() m.Lock()
defer m.Unlock() defer m.Unlock()
@ -199,7 +235,7 @@ func (m *meta) DropSegment(segmentID UniqueID) error {
} }
metrics.DataCoordNumSegments.WithLabelValues(metrics.DropedSegmentLabel).Inc() metrics.DataCoordNumSegments.WithLabelValues(metrics.DropedSegmentLabel).Inc()
m.segments.DropSegment(segmentID) m.segments.DropSegment(segmentID)
log.Info("meta update: dropping segment - complete", log.Debug("meta update: dropping segment - complete",
zap.Int64("segment ID", segmentID)) zap.Int64("segment ID", segmentID))
return nil return nil
} }
@ -238,7 +274,7 @@ func (m *meta) GetAllSegment(segID UniqueID) *SegmentInfo {
// SetState setting segment with provided ID state // SetState setting segment with provided ID state
func (m *meta) SetState(segmentID UniqueID, targetState commonpb.SegmentState) error { func (m *meta) SetState(segmentID UniqueID, targetState commonpb.SegmentState) error {
log.Info("meta update: setting segment state", log.Debug("meta update: setting segment state",
zap.Int64("segment ID", segmentID), zap.Int64("segment ID", segmentID),
zap.Any("target state", targetState)) zap.Any("target state", targetState))
m.Lock() m.Lock()
@ -274,7 +310,7 @@ func (m *meta) SetState(segmentID UniqueID, targetState commonpb.SegmentState) e
} }
// Update in-memory meta. // Update in-memory meta.
m.segments.SetState(segmentID, targetState) m.segments.SetState(segmentID, targetState)
log.Info("meta update: setting segment state - complete", log.Debug("meta update: setting segment state - complete",
zap.Int64("segment ID", segmentID), zap.Int64("segment ID", segmentID),
zap.String("target state", targetState.String())) zap.String("target state", targetState.String()))
return nil return nil
@ -282,7 +318,7 @@ func (m *meta) SetState(segmentID UniqueID, targetState commonpb.SegmentState) e
// UnsetIsImporting removes the `isImporting` flag of a segment. // UnsetIsImporting removes the `isImporting` flag of a segment.
func (m *meta) UnsetIsImporting(segmentID UniqueID) error { func (m *meta) UnsetIsImporting(segmentID UniqueID) error {
log.Info("meta update: unsetting isImport state of segment", log.Debug("meta update: unsetting isImport state of segment",
zap.Int64("segment ID", segmentID)) zap.Int64("segment ID", segmentID))
m.Lock() m.Lock()
defer m.Unlock() defer m.Unlock()
@ -303,7 +339,7 @@ func (m *meta) UnsetIsImporting(segmentID UniqueID) error {
} }
// Update in-memory meta. // Update in-memory meta.
m.segments.SetIsImporting(segmentID, false) m.segments.SetIsImporting(segmentID, false)
log.Info("meta update: unsetting isImport state of segment - complete", log.Debug("meta update: unsetting isImport state of segment - complete",
zap.Int64("segment ID", segmentID)) zap.Int64("segment ID", segmentID))
return nil return nil
} }
@ -320,7 +356,7 @@ func (m *meta) UpdateFlushSegmentsInfo(
checkpoints []*datapb.CheckPoint, checkpoints []*datapb.CheckPoint,
startPositions []*datapb.SegmentStartPosition, startPositions []*datapb.SegmentStartPosition,
) error { ) error {
log.Info("meta update: update flush segments info", log.Debug("meta update: update flush segments info",
zap.Int64("segmentId", segmentID), zap.Int64("segmentId", segmentID),
zap.Int("binlog", len(binlogs)), zap.Int("binlog", len(binlogs)),
zap.Int("stats log", len(statslogs)), zap.Int("stats log", len(statslogs)),
@ -452,7 +488,7 @@ func (m *meta) UpdateFlushSegmentsInfo(
for id, s := range modSegments { for id, s := range modSegments {
m.segments.SetSegment(id, s) m.segments.SetSegment(id, s)
} }
log.Info("meta update: update flush segments info - update flush segments info successfully", log.Debug("meta update: update flush segments info - update flush segments info successfully",
zap.Int64("segment ID", segmentID)) zap.Int64("segment ID", segmentID))
return nil return nil
} }
@ -460,7 +496,7 @@ func (m *meta) UpdateFlushSegmentsInfo(
// UpdateDropChannelSegmentInfo updates segment checkpoints and binlogs before drop // UpdateDropChannelSegmentInfo updates segment checkpoints and binlogs before drop
// reusing segment info to pass segment id, binlogs, statslog, deltalog, start position and checkpoint // reusing segment info to pass segment id, binlogs, statslog, deltalog, start position and checkpoint
func (m *meta) UpdateDropChannelSegmentInfo(channel string, segments []*SegmentInfo) error { func (m *meta) UpdateDropChannelSegmentInfo(channel string, segments []*SegmentInfo) error {
log.Info("meta update: update drop channel segment info", log.Debug("meta update: update drop channel segment info",
zap.String("channel", channel)) zap.String("channel", channel))
m.Lock() m.Lock()
defer m.Unlock() defer m.Unlock()
@ -499,11 +535,11 @@ func (m *meta) UpdateDropChannelSegmentInfo(channel string, segments []*SegmentI
} }
} }
if err != nil { if err != nil {
log.Info("meta update: update drop channel segment info failed", log.Error("meta update: update drop channel segment info failed",
zap.String("channel", channel), zap.String("channel", channel),
zap.Error(err)) zap.Error(err))
} else { } else {
log.Info("meta update: update drop channel segment info - complete", log.Debug("meta update: update drop channel segment info - complete",
zap.String("channel", channel)) zap.String("channel", channel))
} }
return err return err
@ -775,7 +811,7 @@ func (m *meta) SelectSegments(selector SegmentInfoSelector) []*SegmentInfo {
// AddAllocation add allocation in segment // AddAllocation add allocation in segment
func (m *meta) AddAllocation(segmentID UniqueID, allocation *Allocation) error { func (m *meta) AddAllocation(segmentID UniqueID, allocation *Allocation) error {
log.Info("meta update: add allocation", log.Debug("meta update: add allocation",
zap.Int64("segmentID", segmentID), zap.Int64("segmentID", segmentID),
zap.Any("allocation", allocation)) zap.Any("allocation", allocation))
m.Lock() m.Lock()
@ -799,7 +835,7 @@ func (m *meta) AddAllocation(segmentID UniqueID, allocation *Allocation) error {
} }
// Update in-memory meta. // Update in-memory meta.
m.segments.AddAllocation(segmentID, allocation) m.segments.AddAllocation(segmentID, allocation)
log.Info("meta update: add allocation - complete", log.Debug("meta update: add allocation - complete",
zap.Int64("segmentID", segmentID)) zap.Int64("segmentID", segmentID))
return nil return nil
} }
@ -912,7 +948,7 @@ func (m *meta) GetCompleteCompactionMeta(compactionLogs []*datapb.CompactionSegm
} }
segment := NewSegmentInfo(segmentInfo) segment := NewSegmentInfo(segmentInfo)
log.Info("meta update: get complete compaction meta - complete", log.Debug("meta update: get complete compaction meta - complete",
zap.Int64("segmentID", segmentInfo.ID), zap.Int64("segmentID", segmentInfo.ID),
zap.Int64("collectionID", segmentInfo.CollectionID), zap.Int64("collectionID", segmentInfo.CollectionID),
zap.Int64("partitionID", segmentInfo.PartitionID), zap.Int64("partitionID", segmentInfo.PartitionID),
@ -927,7 +963,7 @@ func (m *meta) alterMetaStoreAfterCompaction(modSegments []*datapb.SegmentInfo,
} }
func (m *meta) revertAlterMetaStoreAfterCompaction(oldSegments []*datapb.SegmentInfo, removalSegment *datapb.SegmentInfo) error { func (m *meta) revertAlterMetaStoreAfterCompaction(oldSegments []*datapb.SegmentInfo, removalSegment *datapb.SegmentInfo) error {
log.Info("revert metastore after compaction failure", log.Debug("revert metastore after compaction failure",
zap.Int64("collectionID", removalSegment.CollectionID), zap.Int64("collectionID", removalSegment.CollectionID),
zap.Int64("partitionID", removalSegment.PartitionID), zap.Int64("partitionID", removalSegment.PartitionID),
zap.Int64("compactedTo", removalSegment.ID), zap.Int64("compactedTo", removalSegment.ID),
@ -941,7 +977,7 @@ func (m *meta) alterInMemoryMetaAfterCompaction(segmentCompactTo *SegmentInfo, s
for _, v := range segmentsCompactFrom { for _, v := range segmentsCompactFrom {
compactFromIDs = append(compactFromIDs, v.GetID()) compactFromIDs = append(compactFromIDs, v.GetID())
} }
log.Info("meta update: alter in memory meta after compaction", log.Debug("meta update: alter in memory meta after compaction",
zap.Int64("compact to segment ID", segmentCompactTo.GetID()), zap.Int64("compact to segment ID", segmentCompactTo.GetID()),
zap.Int64s("compact from segment IDs", compactFromIDs)) zap.Int64s("compact from segment IDs", compactFromIDs))
m.Lock() m.Lock()
@ -955,7 +991,7 @@ func (m *meta) alterInMemoryMetaAfterCompaction(segmentCompactTo *SegmentInfo, s
if segmentCompactTo.GetNumOfRows() > 0 { if segmentCompactTo.GetNumOfRows() > 0 {
m.segments.SetSegment(segmentCompactTo.GetID(), segmentCompactTo) m.segments.SetSegment(segmentCompactTo.GetID(), segmentCompactTo)
} }
log.Info("meta update: alter in memory meta after compaction - complete", log.Debug("meta update: alter in memory meta after compaction - complete",
zap.Int64("compact to segment ID", segmentCompactTo.GetID()), zap.Int64("compact to segment ID", segmentCompactTo.GetID()),
zap.Int64s("compact from segment IDs", compactFromIDs)) zap.Int64s("compact from segment IDs", compactFromIDs))
} }

View File

@ -24,6 +24,8 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/milvus-io/milvus/internal/common"
"github.com/golang/protobuf/proto" "github.com/golang/protobuf/proto"
"github.com/milvus-io/milvus/api/commonpb" "github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/internal/kv" "github.com/milvus-io/milvus/internal/kv"
@ -245,12 +247,16 @@ func TestMeta_Basic(t *testing.T) {
assert.Nil(t, err) assert.Nil(t, err)
testSchema := newTestSchema() testSchema := newTestSchema()
collInfo := &datapb.CollectionInfo{
ID: collID, Params.Init()
Schema: testSchema,
Partitions: []UniqueID{partID0, partID1}, collInfo := &collectionInfo{
ID: collID,
Schema: testSchema,
Partitions: []UniqueID{partID0, partID1},
StartPositions: []*commonpb.KeyDataPair{},
} }
collInfoWoPartition := &datapb.CollectionInfo{ collInfoWoPartition := &collectionInfo{
ID: collID, ID: collID,
Schema: testSchema, Schema: testSchema,
Partitions: []UniqueID{}, Partitions: []UniqueID{},
@ -424,6 +430,29 @@ func TestMeta_Basic(t *testing.T) {
result = meta.GetSegmentsChanPart(func(seg *SegmentInfo) bool { return seg.GetCollectionID() == 10 }) result = meta.GetSegmentsChanPart(func(seg *SegmentInfo) bool { return seg.GetCollectionID() == 10 })
assert.Equal(t, 0, len(result)) assert.Equal(t, 0, len(result))
}) })
t.Run("GetClonedCollectionInfo", func(t *testing.T) {
// collection does not exist
ret := meta.GetClonedCollectionInfo(-1)
assert.Nil(t, ret)
collInfo.Properties = map[string]string{
common.CollectionTTLConfigKey: "3600",
}
meta.AddCollection(collInfo)
ret = meta.GetClonedCollectionInfo(collInfo.ID)
equalCollectionInfo(t, collInfo, ret)
collInfo.StartPositions = []*commonpb.KeyDataPair{
{
Key: "k",
Data: []byte("v"),
},
}
meta.AddCollection(collInfo)
ret = meta.GetClonedCollectionInfo(collInfo.ID)
equalCollectionInfo(t, collInfo, ret)
})
} }
func TestGetUnFlushedSegments(t *testing.T) { func TestGetUnFlushedSegments(t *testing.T) {
@ -953,3 +982,11 @@ func TestMeta_isSegmentHealthy_issue17823_panic(t *testing.T) {
assert.False(t, isSegmentHealthy(seg)) assert.False(t, isSegmentHealthy(seg))
} }
func equalCollectionInfo(t *testing.T, a *collectionInfo, b *collectionInfo) {
assert.Equal(t, a.ID, b.ID)
assert.Equal(t, a.Partitions, b.Partitions)
assert.Equal(t, a.Schema, b.Schema)
assert.Equal(t, a.Properties, b.Properties)
assert.Equal(t, a.StartPositions, b.StartPositions)
}

View File

@ -59,6 +59,17 @@ func (m *MockAllocator) allocID(ctx context.Context) (UniqueID, error) {
return val, nil return val, nil
} }
type MockAllocator0 struct {
}
func (m *MockAllocator0) allocTimestamp(ctx context.Context) (Timestamp, error) {
return Timestamp(0), nil
}
func (m *MockAllocator0) allocID(ctx context.Context) (UniqueID, error) {
return 0, nil
}
var _ allocator = (*FailsAllocator)(nil) var _ allocator = (*FailsAllocator)(nil)
// FailsAllocator allocator that fails // FailsAllocator allocator that fails
@ -360,6 +371,10 @@ func (m *mockRootCoordService) ShowCollections(ctx context.Context, req *milvusp
}, nil }, nil
} }
func (m *mockRootCoordService) AlterCollection(ctx context.Context, request *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
panic("not implemented") // TODO: Implement
}
func (m *mockRootCoordService) CreatePartition(ctx context.Context, req *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) { func (m *mockRootCoordService) CreatePartition(ctx context.Context, req *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) {
panic("not implemented") // TODO: Implement panic("not implemented") // TODO: Implement
} }
@ -613,30 +628,30 @@ type mockCompactionTrigger struct {
} }
// triggerCompaction trigger a compaction if any compaction condition satisfy. // triggerCompaction trigger a compaction if any compaction condition satisfy.
func (t *mockCompactionTrigger) triggerCompaction(ct *compactTime) error { func (t *mockCompactionTrigger) triggerCompaction() error {
if f, ok := t.methods["triggerCompaction"]; ok { if f, ok := t.methods["triggerCompaction"]; ok {
if ff, ok := f.(func(ct *compactTime) error); ok { if ff, ok := f.(func() error); ok {
return ff(ct) return ff()
} }
} }
panic("not implemented") panic("not implemented")
} }
// triggerSingleCompaction trigerr a compaction bundled with collection-partiiton-channel-segment // triggerSingleCompaction trigerr a compaction bundled with collection-partiiton-channel-segment
func (t *mockCompactionTrigger) triggerSingleCompaction(collectionID int64, partitionID int64, segmentID int64, channel string, ct *compactTime) error { func (t *mockCompactionTrigger) triggerSingleCompaction(collectionID, partitionID, segmentID int64, channel string) error {
if f, ok := t.methods["triggerSingleCompaction"]; ok { if f, ok := t.methods["triggerSingleCompaction"]; ok {
if ff, ok := f.(func(collectionID int64, partitionID int64, segmentID int64, channel string, ct *compactTime) error); ok { if ff, ok := f.(func(collectionID int64, partitionID int64, segmentID int64, channel string) error); ok {
return ff(collectionID, partitionID, segmentID, channel, ct) return ff(collectionID, partitionID, segmentID, channel)
} }
} }
panic("not implemented") panic("not implemented")
} }
// forceTriggerCompaction force to start a compaction // forceTriggerCompaction force to start a compaction
func (t *mockCompactionTrigger) forceTriggerCompaction(collectionID int64, ct *compactTime) (UniqueID, error) { func (t *mockCompactionTrigger) forceTriggerCompaction(collectionID int64) (UniqueID, error) {
if f, ok := t.methods["forceTriggerCompaction"]; ok { if f, ok := t.methods["forceTriggerCompaction"]; ok {
if ff, ok := f.(func(collectionID int64, ct *compactTime) (UniqueID, error)); ok { if ff, ok := f.(func(collectionID int64) (UniqueID, error)); ok {
return ff(collectionID, ct) return ff(collectionID)
} }
} }
panic("not implemented") panic("not implemented")

View File

@ -103,7 +103,7 @@ func TestAllocSegment(t *testing.T) {
schema := newTestSchema() schema := newTestSchema()
collID, err := mockAllocator.allocID(ctx) collID, err := mockAllocator.allocID(ctx)
assert.Nil(t, err) assert.Nil(t, err)
meta.AddCollection(&datapb.CollectionInfo{ID: collID, Schema: schema}) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema})
t.Run("normal allocation", func(t *testing.T) { t.Run("normal allocation", func(t *testing.T) {
allocations, err := segmentManager.AllocSegment(ctx, collID, 100, "c1", 100) allocations, err := segmentManager.AllocSegment(ctx, collID, 100, "c1", 100)
@ -145,7 +145,7 @@ func TestAllocSegmentForImport(t *testing.T) {
schema := newTestSchema() schema := newTestSchema()
collID, err := mockAllocator.allocID(ctx) collID, err := mockAllocator.allocID(ctx)
assert.Nil(t, err) assert.Nil(t, err)
meta.AddCollection(&datapb.CollectionInfo{ID: collID, Schema: schema}) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema})
t.Run("normal allocation", func(t *testing.T) { t.Run("normal allocation", func(t *testing.T) {
allocation, err := segmentManager.allocSegmentForImport(ctx, collID, 100, "c1", 100, 0) allocation, err := segmentManager.allocSegmentForImport(ctx, collID, 100, "c1", 100, 0)
@ -191,7 +191,7 @@ func TestLoadSegmentsFromMeta(t *testing.T) {
schema := newTestSchema() schema := newTestSchema()
collID, err := mockAllocator.allocID(ctx) collID, err := mockAllocator.allocID(ctx)
assert.Nil(t, err) assert.Nil(t, err)
meta.AddCollection(&datapb.CollectionInfo{ID: collID, Schema: schema}) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema})
sealedSegment := &datapb.SegmentInfo{ sealedSegment := &datapb.SegmentInfo{
ID: 1, ID: 1,
@ -241,7 +241,7 @@ func TestSaveSegmentsToMeta(t *testing.T) {
schema := newTestSchema() schema := newTestSchema()
collID, err := mockAllocator.allocID(context.Background()) collID, err := mockAllocator.allocID(context.Background())
assert.Nil(t, err) assert.Nil(t, err)
meta.AddCollection(&datapb.CollectionInfo{ID: collID, Schema: schema}) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema})
segmentManager := newSegmentManager(meta, mockAllocator, nil) segmentManager := newSegmentManager(meta, mockAllocator, nil)
allocations, err := segmentManager.AllocSegment(context.Background(), collID, 0, "c1", 1000) allocations, err := segmentManager.AllocSegment(context.Background(), collID, 0, "c1", 1000)
assert.Nil(t, err) assert.Nil(t, err)
@ -263,7 +263,7 @@ func TestSaveSegmentsToMetaWithSpecificSegments(t *testing.T) {
schema := newTestSchema() schema := newTestSchema()
collID, err := mockAllocator.allocID(context.Background()) collID, err := mockAllocator.allocID(context.Background())
assert.Nil(t, err) assert.Nil(t, err)
meta.AddCollection(&datapb.CollectionInfo{ID: collID, Schema: schema}) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema})
segmentManager := newSegmentManager(meta, mockAllocator, nil) segmentManager := newSegmentManager(meta, mockAllocator, nil)
allocations, err := segmentManager.AllocSegment(context.Background(), collID, 0, "c1", 1000) allocations, err := segmentManager.AllocSegment(context.Background(), collID, 0, "c1", 1000)
assert.Nil(t, err) assert.Nil(t, err)
@ -285,7 +285,7 @@ func TestDropSegment(t *testing.T) {
schema := newTestSchema() schema := newTestSchema()
collID, err := mockAllocator.allocID(context.Background()) collID, err := mockAllocator.allocID(context.Background())
assert.Nil(t, err) assert.Nil(t, err)
meta.AddCollection(&datapb.CollectionInfo{ID: collID, Schema: schema}) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema})
segmentManager := newSegmentManager(meta, mockAllocator, nil) segmentManager := newSegmentManager(meta, mockAllocator, nil)
allocations, err := segmentManager.AllocSegment(context.Background(), collID, 0, "c1", 1000) allocations, err := segmentManager.AllocSegment(context.Background(), collID, 0, "c1", 1000)
assert.Nil(t, err) assert.Nil(t, err)
@ -308,7 +308,7 @@ func TestAllocRowsLargerThanOneSegment(t *testing.T) {
schema := newTestSchema() schema := newTestSchema()
collID, err := mockAllocator.allocID(context.Background()) collID, err := mockAllocator.allocID(context.Background())
assert.Nil(t, err) assert.Nil(t, err)
meta.AddCollection(&datapb.CollectionInfo{ID: collID, Schema: schema}) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema})
var mockPolicy = func(schema *schemapb.CollectionSchema) (int, error) { var mockPolicy = func(schema *schemapb.CollectionSchema) (int, error) {
return 1, nil return 1, nil
@ -330,7 +330,7 @@ func TestExpireAllocation(t *testing.T) {
schema := newTestSchema() schema := newTestSchema()
collID, err := mockAllocator.allocID(context.Background()) collID, err := mockAllocator.allocID(context.Background())
assert.Nil(t, err) assert.Nil(t, err)
meta.AddCollection(&datapb.CollectionInfo{ID: collID, Schema: schema}) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema})
var mockPolicy = func(schema *schemapb.CollectionSchema) (int, error) { var mockPolicy = func(schema *schemapb.CollectionSchema) (int, error) {
return 10000000, nil return 10000000, nil
@ -373,7 +373,7 @@ func TestGetFlushableSegments(t *testing.T) {
schema := newTestSchema() schema := newTestSchema()
collID, err := mockAllocator.allocID(context.Background()) collID, err := mockAllocator.allocID(context.Background())
assert.Nil(t, err) assert.Nil(t, err)
meta.AddCollection(&datapb.CollectionInfo{ID: collID, Schema: schema}) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema})
segmentManager := newSegmentManager(meta, mockAllocator, nil) segmentManager := newSegmentManager(meta, mockAllocator, nil)
allocations, err := segmentManager.AllocSegment(context.TODO(), collID, 0, "c1", 2) allocations, err := segmentManager.AllocSegment(context.TODO(), collID, 0, "c1", 2)
assert.Nil(t, err) assert.Nil(t, err)
@ -419,7 +419,7 @@ func TestTryToSealSegment(t *testing.T) {
schema := newTestSchema() schema := newTestSchema()
collID, err := mockAllocator.allocID(context.Background()) collID, err := mockAllocator.allocID(context.Background())
assert.Nil(t, err) assert.Nil(t, err)
meta.AddCollection(&datapb.CollectionInfo{ID: collID, Schema: schema}) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema})
segmentManager := newSegmentManager(meta, mockAllocator, nil, withSegmentSealPolices(sealByLifetimePolicy(math.MinInt64))) //always seal segmentManager := newSegmentManager(meta, mockAllocator, nil, withSegmentSealPolices(sealByLifetimePolicy(math.MinInt64))) //always seal
allocations, err := segmentManager.AllocSegment(context.TODO(), collID, 0, "c1", 2) allocations, err := segmentManager.AllocSegment(context.TODO(), collID, 0, "c1", 2)
assert.Nil(t, err) assert.Nil(t, err)
@ -444,7 +444,7 @@ func TestTryToSealSegment(t *testing.T) {
schema := newTestSchema() schema := newTestSchema()
collID, err := mockAllocator.allocID(context.Background()) collID, err := mockAllocator.allocID(context.Background())
assert.Nil(t, err) assert.Nil(t, err)
meta.AddCollection(&datapb.CollectionInfo{ID: collID, Schema: schema}) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema})
segmentManager := newSegmentManager(meta, mockAllocator, nil, withChannelSealPolices(getChannelOpenSegCapacityPolicy(-1))) //always seal segmentManager := newSegmentManager(meta, mockAllocator, nil, withChannelSealPolices(getChannelOpenSegCapacityPolicy(-1))) //always seal
allocations, err := segmentManager.AllocSegment(context.TODO(), collID, 0, "c1", 2) allocations, err := segmentManager.AllocSegment(context.TODO(), collID, 0, "c1", 2)
assert.Nil(t, err) assert.Nil(t, err)
@ -469,7 +469,7 @@ func TestTryToSealSegment(t *testing.T) {
schema := newTestSchema() schema := newTestSchema()
collID, err := mockAllocator.allocID(context.Background()) collID, err := mockAllocator.allocID(context.Background())
assert.Nil(t, err) assert.Nil(t, err)
meta.AddCollection(&datapb.CollectionInfo{ID: collID, Schema: schema}) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema})
segmentManager := newSegmentManager(meta, mockAllocator, nil, segmentManager := newSegmentManager(meta, mockAllocator, nil,
withSegmentSealPolices(sealByLifetimePolicy(math.MinInt64)), withSegmentSealPolices(sealByLifetimePolicy(math.MinInt64)),
withChannelSealPolices(getChannelOpenSegCapacityPolicy(-1))) //always seal withChannelSealPolices(getChannelOpenSegCapacityPolicy(-1))) //always seal
@ -499,7 +499,7 @@ func TestTryToSealSegment(t *testing.T) {
schema := newTestSchema() schema := newTestSchema()
collID, err := mockAllocator.allocID(context.Background()) collID, err := mockAllocator.allocID(context.Background())
assert.Nil(t, err) assert.Nil(t, err)
meta.AddCollection(&datapb.CollectionInfo{ID: collID, Schema: schema}) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema})
segmentManager := newSegmentManager(meta, mockAllocator, nil, withSegmentSealPolices(sealByLifetimePolicy(math.MinInt64))) //always seal segmentManager := newSegmentManager(meta, mockAllocator, nil, withSegmentSealPolices(sealByLifetimePolicy(math.MinInt64))) //always seal
allocations, err := segmentManager.AllocSegment(context.TODO(), collID, 0, "c1", 2) allocations, err := segmentManager.AllocSegment(context.TODO(), collID, 0, "c1", 2)
assert.Nil(t, err) assert.Nil(t, err)
@ -525,7 +525,7 @@ func TestTryToSealSegment(t *testing.T) {
schema := newTestSchema() schema := newTestSchema()
collID, err := mockAllocator.allocID(context.Background()) collID, err := mockAllocator.allocID(context.Background())
assert.Nil(t, err) assert.Nil(t, err)
meta.AddCollection(&datapb.CollectionInfo{ID: collID, Schema: schema}) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema})
segmentManager := newSegmentManager(meta, mockAllocator, nil, withChannelSealPolices(getChannelOpenSegCapacityPolicy(-1))) //always seal segmentManager := newSegmentManager(meta, mockAllocator, nil, withChannelSealPolices(getChannelOpenSegCapacityPolicy(-1))) //always seal
allocations, err := segmentManager.AllocSegment(context.TODO(), collID, 0, "c1", 2) allocations, err := segmentManager.AllocSegment(context.TODO(), collID, 0, "c1", 2)
assert.Nil(t, err) assert.Nil(t, err)

View File

@ -28,7 +28,6 @@ import (
"time" "time"
"github.com/blang/semver/v4" "github.com/blang/semver/v4"
"github.com/minio/minio-go/v7"
clientv3 "go.etcd.io/etcd/client/v3" clientv3 "go.etcd.io/etcd/client/v3"
"go.uber.org/zap" "go.uber.org/zap"
@ -394,23 +393,6 @@ func (s *Server) initGarbageCollection(cli storage.ChunkManager) {
}) })
} }
// here we use variable for test convenience
var getCheckBucketFn = func(cli *minio.Client) func() error {
return func() error {
has, err := cli.BucketExists(context.TODO(), Params.MinioCfg.BucketName)
if err != nil {
return err
}
if !has {
err = cli.MakeBucket(context.TODO(), Params.MinioCfg.BucketName, minio.MakeBucketOptions{})
if err != nil {
return err
}
}
return nil
}
}
func (s *Server) initServiceDiscovery() error { func (s *Server) initServiceDiscovery() error {
r := semver.MustParseRange(">=2.1.2") r := semver.MustParseRange(">=2.1.2")
sessions, rev, err := s.session.GetSessionsWithVersionRange(typeutil.DataNodeRole, r) sessions, rev, err := s.session.GetSessionsWithVersionRange(typeutil.DataNodeRole, r)
@ -925,11 +907,18 @@ func (s *Server) loadCollectionFromRootCoord(ctx context.Context, collectionID i
zap.Int64("collectionID", resp.CollectionID), zap.Error(err)) zap.Int64("collectionID", resp.CollectionID), zap.Error(err))
return err return err
} }
collInfo := &datapb.CollectionInfo{
properties := make(map[string]string)
for _, pair := range resp.Properties {
properties[pair.GetKey()] = pair.GetValue()
}
collInfo := &collectionInfo{
ID: resp.CollectionID, ID: resp.CollectionID,
Schema: resp.Schema, Schema: resp.Schema,
Partitions: presp.PartitionIDs, Partitions: presp.PartitionIDs,
StartPositions: resp.GetStartPositions(), StartPositions: resp.GetStartPositions(),
Properties: properties,
} }
s.meta.AddCollection(collInfo) s.meta.AddCollection(collInfo)
return nil return nil

View File

@ -29,6 +29,16 @@ import (
"testing" "testing"
"time" "time"
"github.com/milvus-io/milvus/internal/mocks"
"github.com/milvus-io/milvus/internal/util/funcutil"
"github.com/milvus-io/milvus/internal/util/typeutil"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
clientv3 "go.etcd.io/etcd/client/v3"
"go.uber.org/zap"
"github.com/milvus-io/milvus/api/commonpb" "github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/api/milvuspb" "github.com/milvus-io/milvus/api/milvuspb"
"github.com/milvus-io/milvus/api/schemapb" "github.com/milvus-io/milvus/api/schemapb"
@ -36,7 +46,6 @@ import (
"github.com/milvus-io/milvus/internal/kv" "github.com/milvus-io/milvus/internal/kv"
etcdkv "github.com/milvus-io/milvus/internal/kv/etcd" etcdkv "github.com/milvus-io/milvus/internal/kv/etcd"
"github.com/milvus-io/milvus/internal/log" "github.com/milvus-io/milvus/internal/log"
"github.com/milvus-io/milvus/internal/mocks"
"github.com/milvus-io/milvus/internal/mq/msgstream" "github.com/milvus-io/milvus/internal/mq/msgstream"
"github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/internal/proto/datapb"
"github.com/milvus-io/milvus/internal/proto/indexpb" "github.com/milvus-io/milvus/internal/proto/indexpb"
@ -46,16 +55,8 @@ import (
"github.com/milvus-io/milvus/internal/types" "github.com/milvus-io/milvus/internal/types"
"github.com/milvus-io/milvus/internal/util/dependency" "github.com/milvus-io/milvus/internal/util/dependency"
"github.com/milvus-io/milvus/internal/util/etcd" "github.com/milvus-io/milvus/internal/util/etcd"
"github.com/milvus-io/milvus/internal/util/funcutil"
"github.com/milvus-io/milvus/internal/util/metricsinfo" "github.com/milvus-io/milvus/internal/util/metricsinfo"
"github.com/milvus-io/milvus/internal/util/sessionutil" "github.com/milvus-io/milvus/internal/util/sessionutil"
"github.com/milvus-io/milvus/internal/util/typeutil"
"github.com/minio/minio-go/v7"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
clientv3 "go.etcd.io/etcd/client/v3"
"go.uber.org/zap"
) )
func TestMain(m *testing.M) { func TestMain(m *testing.M) {
@ -84,7 +85,7 @@ func TestAssignSegmentID(t *testing.T) {
svr := newTestServer(t, nil) svr := newTestServer(t, nil)
defer closeTestServer(t, svr) defer closeTestServer(t, svr)
schema := newTestSchema() schema := newTestSchema()
svr.meta.AddCollection(&datapb.CollectionInfo{ svr.meta.AddCollection(&collectionInfo{
ID: collID, ID: collID,
Schema: schema, Schema: schema,
Partitions: []int64{}, Partitions: []int64{},
@ -115,7 +116,7 @@ func TestAssignSegmentID(t *testing.T) {
svr := newTestServer(t, nil) svr := newTestServer(t, nil)
defer closeTestServer(t, svr) defer closeTestServer(t, svr)
schema := newTestSchema() schema := newTestSchema()
svr.meta.AddCollection(&datapb.CollectionInfo{ svr.meta.AddCollection(&collectionInfo{
ID: collID, ID: collID,
Schema: schema, Schema: schema,
Partitions: []int64{}, Partitions: []int64{},
@ -170,7 +171,7 @@ func TestAssignSegmentID(t *testing.T) {
collID: collID, collID: collID,
} }
schema := newTestSchema() schema := newTestSchema()
svr.meta.AddCollection(&datapb.CollectionInfo{ svr.meta.AddCollection(&collectionInfo{
ID: collID, ID: collID,
Schema: schema, Schema: schema,
Partitions: []int64{}, Partitions: []int64{},
@ -231,7 +232,7 @@ func TestFlush(t *testing.T) {
svr := newTestServer(t, nil) svr := newTestServer(t, nil)
defer closeTestServer(t, svr) defer closeTestServer(t, svr)
schema := newTestSchema() schema := newTestSchema()
svr.meta.AddCollection(&datapb.CollectionInfo{ID: 0, Schema: schema, Partitions: []int64{}}) svr.meta.AddCollection(&collectionInfo{ID: 0, Schema: schema, Partitions: []int64{}})
allocations, err := svr.segmentManager.AllocSegment(context.TODO(), 0, 1, "channel-1", 1) allocations, err := svr.segmentManager.AllocSegment(context.TODO(), 0, 1, "channel-1", 1)
assert.Nil(t, err) assert.Nil(t, err)
assert.EqualValues(t, 1, len(allocations)) assert.EqualValues(t, 1, len(allocations))
@ -1108,7 +1109,7 @@ func TestSaveBinlogPaths(t *testing.T) {
defer closeTestServer(t, svr) defer closeTestServer(t, svr)
// vecFieldID := int64(201) // vecFieldID := int64(201)
svr.meta.AddCollection(&datapb.CollectionInfo{ svr.meta.AddCollection(&collectionInfo{
ID: 0, ID: 0,
}) })
@ -1226,7 +1227,7 @@ func TestSaveBinlogPaths(t *testing.T) {
svr := newTestServer(t, nil, SetSegmentManager(&spySegmentManager{spyCh: spyCh})) svr := newTestServer(t, nil, SetSegmentManager(&spySegmentManager{spyCh: spyCh}))
defer closeTestServer(t, svr) defer closeTestServer(t, svr)
svr.meta.AddCollection(&datapb.CollectionInfo{ID: 1}) svr.meta.AddCollection(&collectionInfo{ID: 1})
err := svr.meta.AddSegment(&SegmentInfo{ err := svr.meta.AddSegment(&SegmentInfo{
Segment: &datapb.SegmentInfo{ Segment: &datapb.SegmentInfo{
ID: 1, ID: 1,
@ -1259,7 +1260,7 @@ func TestDropVirtualChannel(t *testing.T) {
defer closeTestServer(t, svr) defer closeTestServer(t, svr)
vecFieldID := int64(201) vecFieldID := int64(201)
svr.meta.AddCollection(&datapb.CollectionInfo{ svr.meta.AddCollection(&collectionInfo{
ID: 0, ID: 0,
Schema: &schemapb.CollectionSchema{ Schema: &schemapb.CollectionSchema{
Fields: []*schemapb.FieldSchema{ Fields: []*schemapb.FieldSchema{
@ -1445,7 +1446,7 @@ func TestDataNodeTtChannel(t *testing.T) {
svr := newTestServer(t, ch) svr := newTestServer(t, ch)
defer closeTestServer(t, svr) defer closeTestServer(t, svr)
svr.meta.AddCollection(&datapb.CollectionInfo{ svr.meta.AddCollection(&collectionInfo{
ID: 0, ID: 0,
Schema: newTestSchema(), Schema: newTestSchema(),
Partitions: []int64{0}, Partitions: []int64{0},
@ -1514,7 +1515,7 @@ func TestDataNodeTtChannel(t *testing.T) {
ch := make(chan any, 1) ch := make(chan any, 1)
svr := newTestServer(t, ch) svr := newTestServer(t, ch)
defer closeTestServer(t, svr) defer closeTestServer(t, svr)
svr.meta.AddCollection(&datapb.CollectionInfo{ svr.meta.AddCollection(&collectionInfo{
ID: 0, ID: 0,
Schema: newTestSchema(), Schema: newTestSchema(),
Partitions: []int64{0}, Partitions: []int64{0},
@ -1595,7 +1596,7 @@ func TestDataNodeTtChannel(t *testing.T) {
svr := newTestServer(t, nil, SetServerHelper(helper)) svr := newTestServer(t, nil, SetServerHelper(helper))
defer closeTestServer(t, svr) defer closeTestServer(t, svr)
svr.meta.AddCollection(&datapb.CollectionInfo{ svr.meta.AddCollection(&collectionInfo{
ID: 0, ID: 0,
Schema: newTestSchema(), Schema: newTestSchema(),
Partitions: []int64{0}, Partitions: []int64{0},
@ -1649,7 +1650,7 @@ func TestGetVChannelPos(t *testing.T) {
svr := newTestServer(t, nil) svr := newTestServer(t, nil)
defer closeTestServer(t, svr) defer closeTestServer(t, svr)
schema := newTestSchema() schema := newTestSchema()
svr.meta.AddCollection(&datapb.CollectionInfo{ svr.meta.AddCollection(&collectionInfo{
ID: 0, ID: 0,
Schema: schema, Schema: schema,
StartPositions: []*commonpb.KeyDataPair{ StartPositions: []*commonpb.KeyDataPair{
@ -1659,7 +1660,7 @@ func TestGetVChannelPos(t *testing.T) {
}, },
}, },
}) })
svr.meta.AddCollection(&datapb.CollectionInfo{ svr.meta.AddCollection(&collectionInfo{
ID: 1, ID: 1,
Schema: schema, Schema: schema,
StartPositions: []*commonpb.KeyDataPair{ StartPositions: []*commonpb.KeyDataPair{
@ -1792,7 +1793,7 @@ func TestShouldDropChannel(t *testing.T) {
svr := newTestServer(t, nil) svr := newTestServer(t, nil)
defer closeTestServer(t, svr) defer closeTestServer(t, svr)
schema := newTestSchema() schema := newTestSchema()
svr.meta.AddCollection(&datapb.CollectionInfo{ svr.meta.AddCollection(&collectionInfo{
ID: 0, ID: 0,
Schema: schema, Schema: schema,
StartPositions: []*commonpb.KeyDataPair{ StartPositions: []*commonpb.KeyDataPair{
@ -1802,7 +1803,7 @@ func TestShouldDropChannel(t *testing.T) {
}, },
}, },
}) })
svr.meta.AddCollection(&datapb.CollectionInfo{ svr.meta.AddCollection(&collectionInfo{
ID: 1, ID: 1,
Schema: schema, Schema: schema,
StartPositions: []*commonpb.KeyDataPair{ StartPositions: []*commonpb.KeyDataPair{
@ -1988,7 +1989,7 @@ func TestGetRecoveryInfo(t *testing.T) {
return newMockRootCoordService(), nil return newMockRootCoordService(), nil
} }
svr.meta.AddCollection(&datapb.CollectionInfo{ svr.meta.AddCollection(&collectionInfo{
Schema: newTestSchema(), Schema: newTestSchema(),
}) })
seg1 := createSegment(0, 0, 0, 100, 10, "vchan1", commonpb.SegmentState_Flushed) seg1 := createSegment(0, 0, 0, 100, 10, "vchan1", commonpb.SegmentState_Flushed)
@ -2048,6 +2049,11 @@ func TestGetRecoveryInfo(t *testing.T) {
return newMockRootCoordService(), nil return newMockRootCoordService(), nil
} }
svr.meta.AddCollection(&collectionInfo{
ID: 0,
Schema: newTestSchema(),
})
seg1 := createSegment(3, 0, 0, 100, 30, "vchan1", commonpb.SegmentState_Growing) seg1 := createSegment(3, 0, 0, 100, 30, "vchan1", commonpb.SegmentState_Growing)
seg2 := createSegment(4, 0, 0, 100, 40, "vchan1", commonpb.SegmentState_Growing) seg2 := createSegment(4, 0, 0, 100, 40, "vchan1", commonpb.SegmentState_Growing)
err := svr.meta.AddSegment(NewSegmentInfo(seg1)) err := svr.meta.AddSegment(NewSegmentInfo(seg1))
@ -2073,7 +2079,7 @@ func TestGetRecoveryInfo(t *testing.T) {
svr := newTestServer(t, nil) svr := newTestServer(t, nil)
defer closeTestServer(t, svr) defer closeTestServer(t, svr)
svr.meta.AddCollection(&datapb.CollectionInfo{ svr.meta.AddCollection(&collectionInfo{
Schema: newTestSchema(), Schema: newTestSchema(),
}) })
@ -2177,6 +2183,11 @@ func TestGetRecoveryInfo(t *testing.T) {
return newMockRootCoordService(), nil return newMockRootCoordService(), nil
} }
svr.meta.AddCollection(&collectionInfo{
ID: 0,
Schema: newTestSchema(),
})
seg1 := createSegment(7, 0, 0, 100, 30, "vchan1", commonpb.SegmentState_Growing) seg1 := createSegment(7, 0, 0, 100, 30, "vchan1", commonpb.SegmentState_Growing)
seg2 := createSegment(8, 0, 0, 100, 40, "vchan1", commonpb.SegmentState_Dropped) seg2 := createSegment(8, 0, 0, 100, 40, "vchan1", commonpb.SegmentState_Dropped)
err := svr.meta.AddSegment(NewSegmentInfo(seg1)) err := svr.meta.AddSegment(NewSegmentInfo(seg1))
@ -2282,7 +2293,7 @@ func TestManualCompaction(t *testing.T) {
svr.stateCode.Store(commonpb.StateCode_Healthy) svr.stateCode.Store(commonpb.StateCode_Healthy)
svr.compactionTrigger = &mockCompactionTrigger{ svr.compactionTrigger = &mockCompactionTrigger{
methods: map[string]interface{}{ methods: map[string]interface{}{
"forceTriggerCompaction": func(collectionID int64, ct *compactTime) (UniqueID, error) { "forceTriggerCompaction": func(collectionID int64) (UniqueID, error) {
return 1, nil return 1, nil
}, },
}, },
@ -2301,7 +2312,7 @@ func TestManualCompaction(t *testing.T) {
svr.stateCode.Store(commonpb.StateCode_Healthy) svr.stateCode.Store(commonpb.StateCode_Healthy)
svr.compactionTrigger = &mockCompactionTrigger{ svr.compactionTrigger = &mockCompactionTrigger{
methods: map[string]interface{}{ methods: map[string]interface{}{
"forceTriggerCompaction": func(collectionID int64, ct *compactTime) (UniqueID, error) { "forceTriggerCompaction": func(collectionID int64) (UniqueID, error) {
return 0, errors.New("mock error") return 0, errors.New("mock error")
}, },
}, },
@ -2320,7 +2331,7 @@ func TestManualCompaction(t *testing.T) {
svr.stateCode.Store(commonpb.StateCode_Abnormal) svr.stateCode.Store(commonpb.StateCode_Abnormal)
svr.compactionTrigger = &mockCompactionTrigger{ svr.compactionTrigger = &mockCompactionTrigger{
methods: map[string]interface{}{ methods: map[string]interface{}{
"forceTriggerCompaction": func(collectionID int64, ct *compactTime) (UniqueID, error) { "forceTriggerCompaction": func(collectionID int64) (UniqueID, error) {
return 1, nil return 1, nil
}, },
}, },
@ -2905,6 +2916,9 @@ func TestDataCoord_SaveImportSegment(t *testing.T) {
t.Run("test add segment", func(t *testing.T) { t.Run("test add segment", func(t *testing.T) {
svr := newTestServer(t, nil) svr := newTestServer(t, nil)
defer closeTestServer(t, svr) defer closeTestServer(t, svr)
svr.meta.AddCollection(&collectionInfo{
ID: 100,
})
seg := buildSegment(100, 100, 100, "ch1", false) seg := buildSegment(100, 100, 100, "ch1", false)
svr.meta.AddSegment(seg) svr.meta.AddSegment(seg)
svr.sessionManager.AddSession(&NodeInfo{ svr.sessionManager.AddSession(&NodeInfo{
@ -3249,27 +3263,6 @@ func Test_newChunkManagerFactory(t *testing.T) {
assert.Contains(t, err.Error(), "too many colons in address") assert.Contains(t, err.Error(), "too many colons in address")
}) })
// mock CheckBucketFn
getCheckBucketFnBak := getCheckBucketFn
getCheckBucketFn = func(cli *minio.Client) func() error {
return func() error { return nil }
}
defer func() {
getCheckBucketFn = getCheckBucketFnBak
}()
Params.MinioCfg.Address = "minio:9000"
t.Run("ok", func(t *testing.T) {
storageCli, err := server.newChunkManagerFactory()
assert.NotNil(t, storageCli)
assert.NoError(t, err)
})
t.Run("iam_ok", func(t *testing.T) {
Params.CommonCfg.StorageType = "minio"
Params.MinioCfg.UseIAM = true
storageCli, err := server.newChunkManagerFactory()
assert.Nil(t, storageCli)
assert.Error(t, err)
})
t.Run("local storage init", func(t *testing.T) { t.Run("local storage init", func(t *testing.T) {
Params.CommonCfg.StorageType = "local" Params.CommonCfg.StorageType = "local"
storageCli, err := server.newChunkManagerFactory() storageCli, err := server.newChunkManagerFactory()
@ -3288,16 +3281,6 @@ func Test_initGarbageCollection(t *testing.T) {
server := newTestServer2(t, nil) server := newTestServer2(t, nil)
Params.DataCoordCfg.EnableGarbageCollection = true Params.DataCoordCfg.EnableGarbageCollection = true
// mock CheckBucketFn
getCheckBucketFnBak := getCheckBucketFn
getCheckBucketFn = func(cli *minio.Client) func() error {
return func() error { return nil }
}
defer func() {
getCheckBucketFn = getCheckBucketFnBak
}()
Params.MinioCfg.Address = "minio:9000"
t.Run("ok", func(t *testing.T) { t.Run("ok", func(t *testing.T) {
storageCli, err := server.newChunkManagerFactory() storageCli, err := server.newChunkManagerFactory()
assert.NotNil(t, storageCli) assert.NotNil(t, storageCli)

View File

@ -21,7 +21,6 @@ import (
"fmt" "fmt"
"math/rand" "math/rand"
"strconv" "strconv"
"time"
"github.com/milvus-io/milvus/api/commonpb" "github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/api/milvuspb" "github.com/milvus-io/milvus/api/milvuspb"
@ -439,20 +438,12 @@ func (s *Server) SaveBinlogPaths(ctx context.Context, req *datapb.SaveBinlogPath
s.flushCh <- req.SegmentID s.flushCh <- req.SegmentID
if !req.Importing && Params.DataCoordCfg.EnableCompaction { if !req.Importing && Params.DataCoordCfg.EnableCompaction {
cctx, cancel := context.WithTimeout(s.ctx, 5*time.Second) err = s.compactionTrigger.triggerSingleCompaction(segment.GetCollectionID(), segment.GetPartitionID(),
defer cancel() segmentID, segment.GetInsertChannel())
if err != nil {
ct, err := GetCompactTime(cctx, s.allocator) log.Warn("failed to trigger single compaction", zap.Int64("segment ID", segmentID))
if err == nil {
err = s.compactionTrigger.triggerSingleCompaction(segment.GetCollectionID(),
segment.GetPartitionID(), segmentID, segment.GetInsertChannel(), ct)
if err != nil {
log.Warn("failed to trigger single compaction", zap.Int64("segment ID", segmentID))
} else {
log.Info("compaction triggered for segment", zap.Int64("segment ID", segmentID))
}
} else { } else {
log.Warn("failed to get time travel reverse time") log.Info("compaction triggered for segment", zap.Int64("segment ID", segmentID))
} }
} }
} }
@ -916,14 +907,7 @@ func (s *Server) ManualCompaction(ctx context.Context, req *milvuspb.ManualCompa
return resp, nil return resp, nil
} }
ct, err := GetCompactTime(ctx, s.allocator) id, err := s.compactionTrigger.forceTriggerCompaction(req.CollectionID)
if err != nil {
log.Warn("failed to get compact time", zap.Int64("collectionID", req.GetCollectionID()), zap.Error(err))
resp.Status.Reason = err.Error()
return resp, nil
}
id, err := s.compactionTrigger.forceTriggerCompaction(req.CollectionID, ct)
if err != nil { if err != nil {
log.Error("failed to trigger manual compaction", zap.Int64("collectionID", req.GetCollectionID()), zap.Error(err)) log.Error("failed to trigger manual compaction", zap.Int64("collectionID", req.GetCollectionID()), zap.Error(err))
resp.Status.Reason = err.Error() resp.Status.Reason = err.Error()
@ -1374,3 +1358,46 @@ func (s *Server) MarkSegmentsDropped(ctx context.Context, req *datapb.MarkSegmen
ErrorCode: commonpb.ErrorCode_Success, ErrorCode: commonpb.ErrorCode_Success,
}, nil }, nil
} }
func (s *Server) BroadCastAlteredCollection(ctx context.Context,
req *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
errResp := &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
Reason: "",
}
if s.isClosed() {
log.Warn("failed to broadcast collection information for closed server")
errResp.Reason = msgDataCoordIsUnhealthy(Params.DataCoordCfg.GetNodeID())
return errResp, nil
}
// get collection info from cache
clonedColl := s.meta.GetClonedCollectionInfo(req.CollectionID)
// try to reload collection from RootCoord
if clonedColl == nil {
err := s.loadCollectionFromRootCoord(ctx, req.CollectionID)
if err != nil {
log.Warn("failed to load collection from rootcoord", zap.Int64("collectionID", req.CollectionID), zap.Error(err))
errResp.Reason = fmt.Sprintf("failed to load collection from rootcoord, collectionID:%d", req.CollectionID)
return errResp, nil
}
}
clonedColl = s.meta.GetClonedCollectionInfo(req.CollectionID)
if clonedColl == nil {
return nil, fmt.Errorf("get collection from cache failed, collectionID:%d", req.CollectionID)
}
properties := make(map[string]string)
for _, pair := range req.Properties {
properties[pair.GetKey()] = pair.GetValue()
}
clonedColl.Properties = properties
s.meta.AddCollection(clonedColl)
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}

View File

@ -19,9 +19,12 @@ package datacoord
import ( import (
"context" "context"
"errors" "errors"
"strconv"
"sync" "sync"
"time" "time"
"github.com/milvus-io/milvus/internal/common"
"github.com/milvus-io/milvus/api/commonpb" "github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/api/schemapb" "github.com/milvus-io/milvus/api/schemapb"
"github.com/milvus-io/milvus/internal/log" "github.com/milvus-io/milvus/internal/log"
@ -93,10 +96,10 @@ func GetCompactTime(ctx context.Context, allocator allocator) (*compactTime, err
if Params.CommonCfg.EntityExpirationTTL > 0 { if Params.CommonCfg.EntityExpirationTTL > 0 {
ttexpired := pts.Add(-Params.CommonCfg.EntityExpirationTTL) ttexpired := pts.Add(-Params.CommonCfg.EntityExpirationTTL)
ttexpiredLogic := tsoutil.ComposeTS(ttexpired.UnixNano()/int64(time.Millisecond), 0) ttexpiredLogic := tsoutil.ComposeTS(ttexpired.UnixNano()/int64(time.Millisecond), 0)
return &compactTime{ttRetentionLogic, ttexpiredLogic}, nil return &compactTime{ttRetentionLogic, ttexpiredLogic, Params.CommonCfg.EntityExpirationTTL}, nil
} }
// no expiration time // no expiration time
return &compactTime{ttRetentionLogic, 0}, nil return &compactTime{ttRetentionLogic, 0, 0}, nil
} }
func FilterInIndexedSegments(meta *meta, indexCoord types.IndexCoord, segments ...*SegmentInfo) []*SegmentInfo { func FilterInIndexedSegments(meta *meta, indexCoord types.IndexCoord, segments ...*SegmentInfo) []*SegmentInfo {
@ -115,7 +118,7 @@ func FilterInIndexedSegments(meta *meta, indexCoord types.IndexCoord, segments .
collectionSegments[collectionID] = append(collectionSegments[collectionID], segment.GetID()) collectionSegments[collectionID] = append(collectionSegments[collectionID], segment.GetID())
} }
for collection := range collectionSegments { for collection := range collectionSegments {
schema := meta.GetCollection(collection).GetSchema() schema := meta.GetCollection(collection).Schema
for _, field := range schema.GetFields() { for _, field := range schema.GetFields() {
if field.GetDataType() == schemapb.DataType_BinaryVector || if field.GetDataType() == schemapb.DataType_BinaryVector ||
field.GetDataType() == schemapb.DataType_FloatVector { field.GetDataType() == schemapb.DataType_FloatVector {
@ -190,3 +193,17 @@ func getZeroTime() time.Time {
var t time.Time var t time.Time
return t return t
} }
// getCollectionTTL returns ttl if collection's ttl is specified, or return global ttl
func getCollectionTTL(properties map[string]string) (time.Duration, error) {
v, ok := properties[common.CollectionTTLConfigKey]
if ok {
ttl, err := strconv.Atoi(v)
if err != nil {
return -1, err
}
return time.Duration(ttl) * time.Second, nil
}
return Params.CommonCfg.EntityExpirationTTL, nil
}

View File

@ -23,6 +23,7 @@ import (
"time" "time"
"github.com/milvus-io/milvus/api/commonpb" "github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/internal/common"
"github.com/milvus-io/milvus/internal/proto/rootcoordpb" "github.com/milvus-io/milvus/internal/proto/rootcoordpb"
"github.com/milvus-io/milvus/internal/util/tsoutil" "github.com/milvus-io/milvus/internal/util/tsoutil"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
@ -134,7 +135,7 @@ func (suite *UtilSuite) TestGetCompactTime() {
{ {
"test get timetravel", "test get timetravel",
args{&fixedTSOAllocator{fixedTime: tFixed}}, args{&fixedTSOAllocator{fixedTime: tFixed}},
&compactTime{tsoutil.ComposeTS(tBefore.UnixNano()/int64(time.Millisecond), 0), 0}, &compactTime{tsoutil.ComposeTS(tBefore.UnixNano()/int64(time.Millisecond), 0), 0, 0},
false, false,
}, },
} }
@ -170,3 +171,26 @@ func (suite *UtilSuite) TestGetZeroTime() {
suite.True(timeGot.IsZero()) suite.True(timeGot.IsZero())
} }
} }
func (suite *UtilSuite) TestGetCollectionTTL() {
properties1 := map[string]string{
common.CollectionTTLConfigKey: "3600",
}
// get ttl from configuration file
ttl, err := getCollectionTTL(properties1)
suite.NoError(err)
suite.Equal(ttl, time.Duration(3600)*time.Second)
properties2 := map[string]string{
common.CollectionTTLConfigKey: "error value",
}
// test for parsing configuration failed
ttl, err = getCollectionTTL(properties2)
suite.Error(err)
suite.Equal(int(ttl), -1)
ttl, err = getCollectionTTL(map[string]string{})
suite.NoError(err)
suite.Equal(ttl, Params.CommonCfg.EntityExpirationTTL)
}

View File

@ -795,12 +795,12 @@ func (t *compactionTask) GetCurrentTime() typeutil.Timestamp {
func (t *compactionTask) isExpiredEntity(ts, now Timestamp) bool { func (t *compactionTask) isExpiredEntity(ts, now Timestamp) bool {
// entity expire is not enabled if duration <= 0 // entity expire is not enabled if duration <= 0
if Params.CommonCfg.EntityExpirationTTL <= 0 { if t.plan.GetCollectionTtl() <= 0 {
return false return false
} }
pts, _ := tsoutil.ParseTS(ts) pts, _ := tsoutil.ParseTS(ts)
pnow, _ := tsoutil.ParseTS(now) pnow, _ := tsoutil.ParseTS(now)
expireTime := pts.Add(Params.CommonCfg.EntityExpirationTTL) expireTime := pts.Add(time.Duration(t.plan.GetCollectionTtl()))
return expireTime.Before(pnow) return expireTime.Before(pnow)
} }

View File

@ -345,7 +345,7 @@ func TestCompactionTaskInnerMethods(t *testing.T) {
t.Run("Merge with expiration", func(t *testing.T) { t.Run("Merge with expiration", func(t *testing.T) {
alloc := NewAllocatorFactory(1) alloc := NewAllocatorFactory(1)
mockbIO := &binlogIO{cm, alloc} mockbIO := &binlogIO{cm, alloc}
Params.CommonCfg.EntityExpirationTTL = 864000 // 10 days in seconds
iData := genInsertDataWithExpiredTS() iData := genInsertDataWithExpiredTS()
meta := NewMetaFactory().GetCollectionMeta(1, "test", schemapb.DataType_Int64) meta := NewMetaFactory().GetCollectionMeta(1, "test", schemapb.DataType_Int64)
@ -368,7 +368,15 @@ func TestCompactionTaskInnerMethods(t *testing.T) {
1: 10000, 1: 10000,
} }
ct := &compactionTask{Replica: replica, downloader: mockbIO, uploader: mockbIO} // 10 days in seconds
ct := &compactionTask{
Replica: replica,
downloader: mockbIO,
uploader: mockbIO,
plan: &datapb.CompactionPlan{
CollectionTtl: 864000,
},
}
inPaths, statsPaths, _, numOfRow, err := ct.merge(context.Background(), allPaths, 2, 0, meta, dm) inPaths, statsPaths, _, numOfRow, err := ct.merge(context.Background(), allPaths, 2, 0, meta, dm)
assert.NoError(t, err) assert.NoError(t, err)
assert.Equal(t, int64(0), numOfRow) assert.Equal(t, int64(0), numOfRow)
@ -453,9 +461,12 @@ func TestCompactionTaskInnerMethods(t *testing.T) {
t.Run("Test isExpiredEntity", func(t *testing.T) { t.Run("Test isExpiredEntity", func(t *testing.T) {
t.Run("When CompactionEntityExpiration is set math.MaxInt64", func(t *testing.T) { t.Run("When CompactionEntityExpiration is set math.MaxInt64", func(t *testing.T) {
Params.CommonCfg.EntityExpirationTTL = math.MaxInt64 ct := &compactionTask{
plan: &datapb.CompactionPlan{
CollectionTtl: math.MaxInt64,
},
}
ct := &compactionTask{}
res := ct.isExpiredEntity(0, genTimestamp()) res := ct.isExpiredEntity(0, genTimestamp())
assert.Equal(t, false, res) assert.Equal(t, false, res)
@ -472,9 +483,12 @@ func TestCompactionTaskInnerMethods(t *testing.T) {
assert.Equal(t, false, res) assert.Equal(t, false, res)
}) })
t.Run("When CompactionEntityExpiration is set MAX_ENTITY_EXPIRATION = 0", func(t *testing.T) { t.Run("When CompactionEntityExpiration is set MAX_ENTITY_EXPIRATION = 0", func(t *testing.T) {
Params.CommonCfg.EntityExpirationTTL = 0 // 0 means expiration is not enabled // 0 means expiration is not enabled
ct := &compactionTask{
ct := &compactionTask{} plan: &datapb.CompactionPlan{
CollectionTtl: 0,
},
}
res := ct.isExpiredEntity(0, genTimestamp()) res := ct.isExpiredEntity(0, genTimestamp())
assert.Equal(t, false, res) assert.Equal(t, false, res)
@ -491,9 +505,12 @@ func TestCompactionTaskInnerMethods(t *testing.T) {
assert.Equal(t, false, res) assert.Equal(t, false, res)
}) })
t.Run("When CompactionEntityExpiration is set 10 days", func(t *testing.T) { t.Run("When CompactionEntityExpiration is set 10 days", func(t *testing.T) {
Params.CommonCfg.EntityExpirationTTL = 864000 // 10 days in seconds // 10 days in seconds
ct := &compactionTask{
ct := &compactionTask{} plan: &datapb.CompactionPlan{
CollectionTtl: 864000,
},
}
res := ct.isExpiredEntity(0, genTimestamp()) res := ct.isExpiredEntity(0, genTimestamp())
assert.Equal(t, true, res) assert.Equal(t, true, res)

View File

@ -271,6 +271,12 @@ func (ds *DataCoordFactory) MarkSegmentsDropped(context.Context, *datapb.MarkSeg
}, nil }, nil
} }
// BroadCastAlteredCollection mocks the DataCoord broadcast endpoint and
// unconditionally reports success.
func (ds *DataCoordFactory) BroadCastAlteredCollection(ctx context.Context, req *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
	status := &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}
	return status, nil
}
func (ds *DataCoordFactory) GetSegmentInfo(ctx context.Context, req *datapb.GetSegmentInfoRequest) (*datapb.GetSegmentInfoResponse, error) { func (ds *DataCoordFactory) GetSegmentInfo(ctx context.Context, req *datapb.GetSegmentInfoRequest) (*datapb.GetSegmentInfoResponse, error) {
if ds.GetSegmentInfosError { if ds.GetSegmentInfosError {
return nil, errors.New("mock error") return nil, errors.New("mock error")

View File

@ -629,3 +629,17 @@ func (c *Client) MarkSegmentsDropped(ctx context.Context, req *datapb.MarkSegmen
} }
return ret.(*commonpb.Status), err return ret.(*commonpb.Status), err
} }
// BroadCastAlteredCollection is the DataCoord client side code for BroadCastAlteredCollection call.
func (c *Client) BroadCastAlteredCollection(ctx context.Context, req *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
	// Issue the RPC through the retrying gRPC wrapper; abort early when the
	// caller's context is no longer valid.
	resp, err := c.grpcClient.ReCall(ctx, func(client interface{}) (interface{}, error) {
		if !funcutil.CheckCtxValid(ctx) {
			return nil, ctx.Err()
		}
		return client.(datapb.DataCoordClient).BroadCastAlteredCollection(ctx, req)
	})
	if err != nil || resp == nil {
		return nil, err
	}
	return resp.(*commonpb.Status), err
}

View File

@ -144,6 +144,11 @@ func Test_NewClient(t *testing.T) {
r31, err := client.ShowConfigurations(ctx, nil) r31, err := client.ShowConfigurations(ctx, nil)
retCheck(retNotNil, r31, err) retCheck(retNotNil, r31, err)
{
ret, err := client.BroadCastAlteredCollection(ctx, nil)
retCheck(retNotNil, ret, err)
}
} }
client.grpcClient = &mock.GRPCClientBase{ client.grpcClient = &mock.GRPCClientBase{

View File

@ -397,3 +397,7 @@ func (s *Server) UnsetIsImportingState(ctx context.Context, request *datapb.Unse
func (s *Server) MarkSegmentsDropped(ctx context.Context, req *datapb.MarkSegmentsDroppedRequest) (*commonpb.Status, error) { func (s *Server) MarkSegmentsDropped(ctx context.Context, req *datapb.MarkSegmentsDroppedRequest) (*commonpb.Status, error) {
return s.dataCoord.MarkSegmentsDropped(ctx, req) return s.dataCoord.MarkSegmentsDropped(ctx, req)
} }
// BroadCastAlteredCollection forwards the altered-collection broadcast request
// to the wrapped DataCoord component.
func (s *Server) BroadCastAlteredCollection(ctx context.Context, request *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
	return s.dataCoord.BroadCastAlteredCollection(ctx, request)
}

View File

@ -67,6 +67,7 @@ type MockDataCoord struct {
addSegmentResp *commonpb.Status addSegmentResp *commonpb.Status
unsetIsImportingStateResp *commonpb.Status unsetIsImportingStateResp *commonpb.Status
markSegmentsDroppedResp *commonpb.Status markSegmentsDroppedResp *commonpb.Status
broadCastResp *commonpb.Status
} }
func (m *MockDataCoord) Init() error { func (m *MockDataCoord) Init() error {
@ -219,6 +220,10 @@ func (m *MockDataCoord) MarkSegmentsDropped(ctx context.Context, req *datapb.Mar
return m.markSegmentsDroppedResp, m.err return m.markSegmentsDroppedResp, m.err
} }
// BroadCastAlteredCollection returns the canned response and error configured
// on the mock (broadCastResp / err fields).
func (m *MockDataCoord) BroadCastAlteredCollection(ctx context.Context, req *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
	return m.broadCastResp, m.err
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func Test_NewServer(t *testing.T) { func Test_NewServer(t *testing.T) {
ctx := context.Background() ctx := context.Background()

View File

@ -113,6 +113,7 @@ func (h *Handlers) handleCreateCollection(c *gin.Context) (interface{}, error) {
Schema: schemaProto, Schema: schemaProto,
ShardsNum: wrappedReq.ShardsNum, ShardsNum: wrappedReq.ShardsNum,
ConsistencyLevel: wrappedReq.ConsistencyLevel, ConsistencyLevel: wrappedReq.ConsistencyLevel,
Properties: wrappedReq.Properties,
} }
return h.proxy.CreateCollection(c, req) return h.proxy.CreateCollection(c, req)
} }

View File

@ -65,6 +65,10 @@ func (mockProxyComponent) ShowCollections(ctx context.Context, request *milvuspb
return &milvuspb.ShowCollectionsResponse{Status: testStatus}, nil return &milvuspb.ShowCollectionsResponse{Status: testStatus}, nil
} }
// AlterCollection is a stub that always returns the shared test status.
func (mockProxyComponent) AlterCollection(ctx context.Context, request *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
	return testStatus, nil
}
func (mockProxyComponent) CreatePartition(ctx context.Context, request *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) { func (mockProxyComponent) CreatePartition(ctx context.Context, request *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) {
return testStatus, nil return testStatus, nil
} }

View File

@ -37,6 +37,7 @@ type WrappedCreateCollectionRequest struct {
ShardsNum int32 `protobuf:"varint,5,opt,name=shards_num,json=shardsNum,proto3" json:"shards_num,omitempty"` ShardsNum int32 `protobuf:"varint,5,opt,name=shards_num,json=shardsNum,proto3" json:"shards_num,omitempty"`
// The consistency level that the collection used, modification is not supported now. // The consistency level that the collection used, modification is not supported now.
ConsistencyLevel commonpb.ConsistencyLevel `protobuf:"varint,6,opt,name=consistency_level,json=consistencyLevel,proto3,enum=milvus.proto.common.ConsistencyLevel" json:"consistency_level,omitempty"` ConsistencyLevel commonpb.ConsistencyLevel `protobuf:"varint,6,opt,name=consistency_level,json=consistencyLevel,proto3,enum=milvus.proto.common.ConsistencyLevel" json:"consistency_level,omitempty"`
Properties []*commonpb.KeyValuePair `protobuf:"bytes,13,rep,name=properties,proto3" json:"properties,omitempty"`
} }
// WrappedInsertRequest is the InsertRequest wrapped for RESTful request // WrappedInsertRequest is the InsertRequest wrapped for RESTful request

View File

@ -592,6 +592,10 @@ func (s *Server) ShowCollections(ctx context.Context, request *milvuspb.ShowColl
return s.proxy.ShowCollections(ctx, request) return s.proxy.ShowCollections(ctx, request)
} }
// AlterCollection notifies Proxy to alter a collection's properties.
func (s *Server) AlterCollection(ctx context.Context, request *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
	return s.proxy.AlterCollection(ctx, request)
}
// CreatePartition notifies Proxy to create a partition // CreatePartition notifies Proxy to create a partition
func (s *Server) CreatePartition(ctx context.Context, request *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) { func (s *Server) CreatePartition(ctx context.Context, request *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) {
return s.proxy.CreatePartition(ctx, request) return s.proxy.CreatePartition(ctx, request)

View File

@ -138,6 +138,10 @@ func (m *MockRootCoord) ShowCollections(ctx context.Context, req *milvuspb.ShowC
return nil, nil return nil, nil
} }
// AlterCollection is a stub that returns nil for both the status and the error.
func (m *MockRootCoord) AlterCollection(ctx context.Context, request *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
	return nil, nil
}
func (m *MockRootCoord) CreatePartition(ctx context.Context, req *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) { func (m *MockRootCoord) CreatePartition(ctx context.Context, req *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) {
return nil, nil return nil, nil
} }
@ -478,6 +482,10 @@ func (m *MockDataCoord) MarkSegmentsDropped(ctx context.Context, req *datapb.Mar
return nil, nil return nil, nil
} }
// AlterCollection is a stub that returns nil for both the status and the error.
func (m *MockDataCoord) AlterCollection(ctx context.Context, request *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
	return nil, nil
}
func (m *MockDataCoord) AssignSegmentID(ctx context.Context, req *datapb.AssignSegmentIDRequest) (*datapb.AssignSegmentIDResponse, error) { func (m *MockDataCoord) AssignSegmentID(ctx context.Context, req *datapb.AssignSegmentIDRequest) (*datapb.AssignSegmentIDResponse, error) {
return nil, nil return nil, nil
} }
@ -574,6 +582,10 @@ func (m *MockDataCoord) ReleaseSegmentLock(ctx context.Context, req *datapb.Rele
return nil, nil return nil, nil
} }
// BroadCastAlteredCollection is a stub that returns nil for both the status
// and the error.
func (m *MockDataCoord) BroadCastAlteredCollection(ctx context.Context, req *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
	return nil, nil
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
type MockProxy struct { type MockProxy struct {
MockBase MockBase
@ -637,6 +649,10 @@ func (m *MockProxy) ShowCollections(ctx context.Context, request *milvuspb.ShowC
return nil, nil return nil, nil
} }
// AlterCollection is a stub that returns nil for both the status and the error.
func (m *MockProxy) AlterCollection(ctx context.Context, request *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
	return nil, nil
}
func (m *MockProxy) CreatePartition(ctx context.Context, request *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) { func (m *MockProxy) CreatePartition(ctx context.Context, request *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) {
return nil, nil return nil, nil
} }

View File

@ -231,6 +231,19 @@ func (c *Client) ShowCollections(ctx context.Context, in *milvuspb.ShowCollectio
return ret.(*milvuspb.ShowCollectionsResponse), err return ret.(*milvuspb.ShowCollectionsResponse), err
} }
// AlterCollection is the RootCoord client side code for the AlterCollection call.
func (c *Client) AlterCollection(ctx context.Context, request *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
	// Issue the RPC through the retrying gRPC wrapper; abort early when the
	// caller's context is no longer valid.
	ret, err := c.grpcClient.ReCall(ctx, func(client interface{}) (interface{}, error) {
		if !funcutil.CheckCtxValid(ctx) {
			return nil, ctx.Err()
		}
		return client.(rootcoordpb.RootCoordClient).AlterCollection(ctx, request)
	})
	if err != nil || ret == nil {
		return nil, err
	}
	return ret.(*commonpb.Status), err
}
// CreatePartition create partition // CreatePartition create partition
func (c *Client) CreatePartition(ctx context.Context, in *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) { func (c *Client) CreatePartition(ctx context.Context, in *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) {
ret, err := c.grpcClient.ReCall(ctx, func(client interface{}) (interface{}, error) { ret, err := c.grpcClient.ReCall(ctx, func(client interface{}) (interface{}, error) {

View File

@ -474,3 +474,7 @@ func (s *Server) SelectGrant(ctx context.Context, request *milvuspb.SelectGrantR
func (s *Server) ListPolicy(ctx context.Context, request *internalpb.ListPolicyRequest) (*internalpb.ListPolicyResponse, error) { func (s *Server) ListPolicy(ctx context.Context, request *internalpb.ListPolicyRequest) (*internalpb.ListPolicyResponse, error) {
return s.rootCoord.ListPolicy(ctx, request) return s.rootCoord.ListPolicy(ctx, request)
} }
// AlterCollection forwards the alter-collection request to the wrapped
// RootCoord component.
func (s *Server) AlterCollection(ctx context.Context, request *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
	return s.rootCoord.AlterCollection(ctx, request)
}

View File

@ -103,6 +103,7 @@ func generateCollectionUpdatesWithoutID(in *dbmodel.Collection) map[string]inter
"start_position": in.StartPosition, "start_position": in.StartPosition,
"consistency_level": in.ConsistencyLevel, "consistency_level": in.ConsistencyLevel,
"status": in.Status, "status": in.Status,
"properties": in.Properties,
"ts": in.Ts, "ts": in.Ts,
"is_deleted": in.IsDeleted, "is_deleted": in.IsDeleted,
"created_at": in.CreatedAt, "created_at": in.CreatedAt,

View File

@ -9,8 +9,10 @@ import (
"testing" "testing"
"time" "time"
"github.com/milvus-io/milvus/internal/common"
"github.com/DATA-DOG/go-sqlmock" "github.com/DATA-DOG/go-sqlmock"
"github.com/milvus-io/milvus/api/commonpb" commonpb "github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/internal/metastore/db/dbcore" "github.com/milvus-io/milvus/internal/metastore/db/dbcore"
"github.com/milvus-io/milvus/internal/metastore/db/dbmodel" "github.com/milvus-io/milvus/internal/metastore/db/dbmodel"
"github.com/milvus-io/milvus/internal/util/typeutil" "github.com/milvus-io/milvus/internal/util/typeutil"
@ -49,6 +51,14 @@ var (
userRoleTestDb dbmodel.IUserRoleDb userRoleTestDb dbmodel.IUserRoleDb
grantTestDb dbmodel.IGrantDb grantTestDb dbmodel.IGrantDb
grantIDTestDb dbmodel.IGrantIDDb grantIDTestDb dbmodel.IGrantIDDb
properties = []*commonpb.KeyValuePair{
{
Key: common.CollectionTTLConfigKey,
Value: "3600",
},
}
propertiesStr, _ = dbmodel.MarshalProperties(properties)
) )
// TestMain is the first function executed in current package, we will do some initial here // TestMain is the first function executed in current package, we will do some initial here
@ -350,6 +360,7 @@ func TestCollection_Insert(t *testing.T) {
ShardsNum: int32(2), ShardsNum: int32(2),
StartPosition: "", StartPosition: "",
ConsistencyLevel: int32(commonpb.ConsistencyLevel_Eventually), ConsistencyLevel: int32(commonpb.ConsistencyLevel_Eventually),
Properties: propertiesStr,
Ts: ts, Ts: ts,
IsDeleted: false, IsDeleted: false,
CreatedAt: time.Now(), CreatedAt: time.Now(),
@ -358,8 +369,8 @@ func TestCollection_Insert(t *testing.T) {
// expectation // expectation
mock.ExpectBegin() mock.ExpectBegin()
mock.ExpectExec("INSERT INTO `collections` (`tenant_id`,`collection_id`,`collection_name`,`description`,`auto_id`,`shards_num`,`start_position`,`consistency_level`,`status`,`ts`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?) ON DUPLICATE KEY UPDATE `id`=`id`"). mock.ExpectExec("INSERT INTO `collections` (`tenant_id`,`collection_id`,`collection_name`,`description`,`auto_id`,`shards_num`,`start_position`,`consistency_level`,`status`,`properties`,`ts`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?) ON DUPLICATE KEY UPDATE `id`=`id`").
WithArgs(collection.TenantID, collection.CollectionID, collection.CollectionName, collection.Description, collection.AutoID, collection.ShardsNum, collection.StartPosition, collection.ConsistencyLevel, collection.Status, collection.Ts, collection.IsDeleted, collection.CreatedAt, collection.UpdatedAt). WithArgs(collection.TenantID, collection.CollectionID, collection.CollectionName, collection.Description, collection.AutoID, collection.ShardsNum, collection.StartPosition, collection.ConsistencyLevel, collection.Status, collection.Properties, collection.Ts, collection.IsDeleted, collection.CreatedAt, collection.UpdatedAt).
WillReturnResult(sqlmock.NewResult(1, 1)) WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectCommit() mock.ExpectCommit()
@ -386,8 +397,8 @@ func TestCollection_Insert_Error(t *testing.T) {
// expectation // expectation
mock.ExpectBegin() mock.ExpectBegin()
mock.ExpectExec("INSERT INTO `collections` (`tenant_id`,`collection_id`,`collection_name`,`description`,`auto_id`,`shards_num`,`start_position`,`consistency_level`,`status`,`ts`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?) ON DUPLICATE KEY UPDATE `id`=`id`"). mock.ExpectExec("INSERT INTO `collections` (`tenant_id`,`collection_id`,`collection_name`,`description`,`auto_id`,`shards_num`,`start_position`,`consistency_level`,`status`,`properties`,`ts`,`is_deleted`,`created_at`,`updated_at`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?) ON DUPLICATE KEY UPDATE `id`=`id`").
WithArgs(collection.TenantID, collection.CollectionID, collection.CollectionName, collection.Description, collection.AutoID, collection.ShardsNum, collection.StartPosition, collection.ConsistencyLevel, collection.Status, collection.Ts, collection.IsDeleted, collection.CreatedAt, collection.UpdatedAt). WithArgs(collection.TenantID, collection.CollectionID, collection.CollectionName, collection.Description, collection.AutoID, collection.ShardsNum, collection.StartPosition, collection.ConsistencyLevel, collection.Status, collection.Properties, collection.Ts, collection.IsDeleted, collection.CreatedAt, collection.UpdatedAt).
WillReturnError(errors.New("test error")) WillReturnError(errors.New("test error"))
mock.ExpectRollback() mock.ExpectRollback()
@ -443,8 +454,8 @@ func Test_collectionDb_Update(t *testing.T) {
// expectation // expectation
mock.ExpectBegin() mock.ExpectBegin()
mock.ExpectExec("UPDATE `collections` SET `auto_id`=?,`collection_id`=?,`collection_name`=?,`consistency_level`=?,`created_at`=?,`description`=?,`is_deleted`=?,`shards_num`=?,`start_position`=?,`status`=?,`tenant_id`=?,`ts`=?,`updated_at`=? WHERE id = ?"). mock.ExpectExec("UPDATE `collections` SET `auto_id`=?,`collection_id`=?,`collection_name`=?,`consistency_level`=?,`created_at`=?,`description`=?,`is_deleted`=?,`properties`=?,`shards_num`=?,`start_position`=?,`status`=?,`tenant_id`=?,`ts`=?,`updated_at`=? WHERE id = ?").
WithArgs(collection.AutoID, collection.CollectionID, collection.CollectionName, collection.ConsistencyLevel, collection.CreatedAt, collection.Description, collection.IsDeleted, collection.ShardsNum, collection.StartPosition, collection.Status, collection.TenantID, collection.Ts, collection.UpdatedAt, collection.ID). WithArgs(collection.AutoID, collection.CollectionID, collection.CollectionName, collection.ConsistencyLevel, collection.CreatedAt, collection.Description, collection.IsDeleted, collection.Properties, collection.ShardsNum, collection.StartPosition, collection.Status, collection.TenantID, collection.Ts, collection.UpdatedAt, collection.ID).
WillReturnResult(sqlmock.NewResult(1, 1)) WillReturnResult(sqlmock.NewResult(1, 1))
mock.ExpectCommit() mock.ExpectCommit()
@ -471,8 +482,8 @@ func Test_collectionDb_Update(t *testing.T) {
// expectation // expectation
mock.ExpectBegin() mock.ExpectBegin()
mock.ExpectExec("UPDATE `collections` SET `auto_id`=?,`collection_id`=?,`collection_name`=?,`consistency_level`=?,`created_at`=?,`description`=?,`is_deleted`=?,`shards_num`=?,`start_position`=?,`status`=?,`tenant_id`=?,`ts`=?,`updated_at`=? WHERE id = ?"). mock.ExpectExec("UPDATE `collections` SET `auto_id`=?,`collection_id`=?,`collection_name`=?,`consistency_level`=?,`created_at`=?,`description`=?,`is_deleted`=?,`properties`=?,`shards_num`=?,`start_position`=?,`status`=?,`tenant_id`=?,`ts`=?,`updated_at`=? WHERE id = ?").
WithArgs(collection.AutoID, collection.CollectionID, collection.CollectionName, collection.ConsistencyLevel, collection.CreatedAt, collection.Description, collection.IsDeleted, collection.ShardsNum, collection.StartPosition, collection.Status, collection.TenantID, collection.Ts, collection.UpdatedAt, collection.ID). WithArgs(collection.AutoID, collection.CollectionID, collection.CollectionName, collection.ConsistencyLevel, collection.CreatedAt, collection.Description, collection.IsDeleted, collection.Properties, collection.ShardsNum, collection.StartPosition, collection.Status, collection.TenantID, collection.Ts, collection.UpdatedAt, collection.ID).
WillReturnError(errors.New("error mock Update")) WillReturnError(errors.New("error mock Update"))
mock.ExpectRollback() mock.ExpectRollback()

View File

@ -2,6 +2,7 @@ package dbmodel
import ( import (
"encoding/json" "encoding/json"
"fmt"
"time" "time"
"github.com/milvus-io/milvus/api/commonpb" "github.com/milvus-io/milvus/api/commonpb"
@ -22,6 +23,7 @@ type Collection struct {
StartPosition string `gorm:"start_position"` StartPosition string `gorm:"start_position"`
ConsistencyLevel int32 `gorm:"consistency_level"` ConsistencyLevel int32 `gorm:"consistency_level"`
Status int32 `gorm:"status"` Status int32 `gorm:"status"`
Properties string `gorm:"properties"`
Ts typeutil.Timestamp `gorm:"ts"` Ts typeutil.Timestamp `gorm:"ts"`
IsDeleted bool `gorm:"is_deleted"` IsDeleted bool `gorm:"is_deleted"`
CreatedAt time.Time `gorm:"created_at"` CreatedAt time.Time `gorm:"created_at"`
@ -55,6 +57,13 @@ func UnmarshalCollectionModel(coll *Collection) (*model.Collection, error) {
} }
} }
properties, err := UnmarshalProperties(coll.Properties)
if err != nil {
log.Error("unmarshal collection properties error", zap.Int64("collID", coll.CollectionID),
zap.String("properties", coll.Properties), zap.Error(err))
return nil, err
}
return &model.Collection{ return &model.Collection{
TenantID: coll.TenantID, TenantID: coll.TenantID,
CollectionID: coll.CollectionID, CollectionID: coll.CollectionID,
@ -65,5 +74,33 @@ func UnmarshalCollectionModel(coll *Collection) (*model.Collection, error) {
StartPositions: startPositions, StartPositions: startPositions,
ConsistencyLevel: commonpb.ConsistencyLevel(coll.ConsistencyLevel), ConsistencyLevel: commonpb.ConsistencyLevel(coll.ConsistencyLevel),
CreateTime: coll.Ts, CreateTime: coll.Ts,
Properties: properties,
}, nil }, nil
} }
// UnmarshalProperties decodes a JSON-encoded properties column value into a
// list of key/value pairs. An empty string (no properties stored) yields a
// nil slice and no error.
func UnmarshalProperties(propertiesStr string) ([]*commonpb.KeyValuePair, error) {
	if propertiesStr == "" {
		return nil, nil
	}

	// The original re-checked propertiesStr != "" here; that condition is
	// always true after the early return above, so it has been removed.
	var properties []*commonpb.KeyValuePair
	if err := json.Unmarshal([]byte(propertiesStr), &properties); err != nil {
		return nil, fmt.Errorf("failed to unmarshal properties: %s", err.Error())
	}

	return properties, nil
}
// MarshalProperties encodes a list of key/value pairs as a JSON string for
// storage in the properties column. A nil slice marshals to the empty string.
func MarshalProperties(properties []*commonpb.KeyValuePair) (string, error) {
	if properties == nil {
		return "", nil
	}

	encoded, err := json.Marshal(properties)
	if err != nil {
		return "", fmt.Errorf("failed to marshal properties: %s", err.Error())
	}

	return string(encoded), nil
}

View File

@ -0,0 +1,106 @@
package dbmodel
import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/internal/common"
)
var (
ts = time.Now()
)
// TestUnmarshalCollectionModel covers the two decode-failure paths of
// UnmarshalCollectionModel (bad start position JSON, bad properties JSON)
// and one full successful conversion to the model type.
func TestUnmarshalCollectionModel(t *testing.T) {
	t.Run("Unmarshal start position fail", func(t *testing.T) {
		// Malformed start-position JSON must abort the conversion.
		collection := &Collection{
			StartPosition: "{\"error json\":}",
		}
		ret, err := UnmarshalCollectionModel(collection)
		assert.Nil(t, ret)
		assert.Error(t, err)
	})

	t.Run("Unmarshal properties fail", func(t *testing.T) {
		// Malformed properties JSON must abort the conversion.
		collection := &Collection{
			Properties: "{\"error json\":}",
		}
		ret, err := UnmarshalCollectionModel(collection)
		assert.Nil(t, ret)
		assert.Error(t, err)
	})

	t.Run("Unmarshal collection successfully", func(t *testing.T) {
		collection := &Collection{
			TenantID:         "",
			CollectionID:     1,
			CollectionName:   "cn",
			Description:      "",
			AutoID:           false,
			ShardsNum:        int32(1),
			StartPosition:    "",
			ConsistencyLevel: int32(commonpb.ConsistencyLevel_Eventually),
			Properties:       "",
			Ts:               1,
			IsDeleted:        false,
			CreatedAt:        ts,
			UpdatedAt:        ts,
		}
		ret, err := UnmarshalCollectionModel(collection)
		assert.NotNil(t, ret)
		assert.NoError(t, err)
		// Each DB column must map onto the corresponding model field.
		assert.Equal(t, "", ret.TenantID)
		assert.Equal(t, int64(1), ret.CollectionID)
		assert.Equal(t, "cn", ret.Name)
		assert.Equal(t, "", ret.Description)
		assert.Equal(t, false, ret.AutoID)
		assert.Equal(t, int32(1), ret.ShardsNum)
		assert.Equal(t, 0, len(ret.StartPositions))
		assert.Equal(t, commonpb.ConsistencyLevel(3), ret.ConsistencyLevel)
		// Empty properties string decodes to a nil slice.
		assert.Nil(t, ret.Properties)
		// The Ts column becomes the model's CreateTime.
		assert.Equal(t, uint64(1), ret.CreateTime)
	})
}
// TestUnmarshalAndMarshalProperties covers the empty, malformed and
// round-trip cases of the properties (de)serialization helpers.
func TestUnmarshalAndMarshalProperties(t *testing.T) {
	t.Run("Unmarshal and Marshal empty", func(t *testing.T) {
		// The empty string and a nil slice are each other's round-trip image.
		ret, err := UnmarshalProperties("")
		assert.Nil(t, ret)
		assert.NoError(t, err)

		ret2, err := MarshalProperties(nil)
		assert.Empty(t, ret2)
		assert.NoError(t, err)
	})

	t.Run("Unmarshal and Marshal fail", func(t *testing.T) {
		// Malformed JSON must surface an error, not a partial result.
		ret, err := UnmarshalProperties("{\"error json\":}")
		assert.Nil(t, ret)
		assert.Error(t, err)
	})

	// Fixed copy-pasted subtest name (was "Unmarshal collection successfully",
	// which describes a different test).
	t.Run("Marshal and Unmarshal properties successfully", func(t *testing.T) {
		properties := []*commonpb.KeyValuePair{
			{
				Key:   common.CollectionTTLConfigKey,
				Value: "3600",
			},
		}

		propertiesStr, err := MarshalProperties(properties)
		assert.NotEmpty(t, propertiesStr)
		assert.NoError(t, err)

		// The marshal/unmarshal round trip must be lossless. Note the
		// testify argument order: expected first, actual second.
		ret2, err := UnmarshalProperties(propertiesStr)
		assert.NotNil(t, ret2)
		assert.NoError(t, err)
		assert.Equal(t, properties, ret2)
	})
}

View File

@ -52,7 +52,12 @@ func (tc *Catalog) CreateCollection(ctx context.Context, collection *model.Colle
startPositionsStr = string(startPositionsBytes) startPositionsStr = string(startPositionsBytes)
} }
err := tc.metaDomain.CollectionDb(txCtx).Insert(&dbmodel.Collection{ properties, err := dbmodel.MarshalProperties(collection.Properties)
if err != nil {
return err
}
err = tc.metaDomain.CollectionDb(txCtx).Insert(&dbmodel.Collection{
TenantID: tenantID, TenantID: tenantID,
CollectionID: collection.CollectionID, CollectionID: collection.CollectionID,
CollectionName: collection.Name, CollectionName: collection.Name,
@ -63,6 +68,7 @@ func (tc *Catalog) CreateCollection(ctx context.Context, collection *model.Colle
ConsistencyLevel: int32(collection.ConsistencyLevel), ConsistencyLevel: int32(collection.ConsistencyLevel),
Status: int32(collection.State), Status: int32(collection.State),
Ts: ts, Ts: ts,
Properties: properties,
}) })
if err != nil { if err != nil {
return err return err
@ -395,6 +401,11 @@ func (tc *Catalog) alterModifyCollection(ctx context.Context, oldColl *model.Col
startPositionsStr = string(startPositionsBytes) startPositionsStr = string(startPositionsBytes)
} }
properties, err := dbmodel.MarshalProperties(newColl.Properties)
if err != nil {
return err
}
createdAt, _ := tsoutil.ParseTS(newColl.CreateTime) createdAt, _ := tsoutil.ParseTS(newColl.CreateTime)
tenantID := contextutil.TenantID(ctx) tenantID := contextutil.TenantID(ctx)
coll := &dbmodel.Collection{ coll := &dbmodel.Collection{
@ -410,6 +421,7 @@ func (tc *Catalog) alterModifyCollection(ctx context.Context, oldColl *model.Col
Ts: ts, Ts: ts,
CreatedAt: createdAt, CreatedAt: createdAt,
UpdatedAt: time.Now(), UpdatedAt: time.Now(),
Properties: properties,
} }
return tc.metaDomain.CollectionDb(ctx).Update(coll) return tc.metaDomain.CollectionDb(ctx).Update(coll)

View File

@ -21,8 +21,8 @@ type Collection struct {
StartPositions []*commonpb.KeyDataPair StartPositions []*commonpb.KeyDataPair
CreateTime uint64 CreateTime uint64
ConsistencyLevel commonpb.ConsistencyLevel ConsistencyLevel commonpb.ConsistencyLevel
Aliases []string // TODO: deprecate this. Aliases []string // TODO: deprecate this.
Extra map[string]string // deprecated. Properties []*commonpb.KeyValuePair
State pb.CollectionState State pb.CollectionState
} }
@ -46,7 +46,7 @@ func (c Collection) Clone() *Collection {
CreateTime: c.CreateTime, CreateTime: c.CreateTime,
StartPositions: common.CloneKeyDataPairs(c.StartPositions), StartPositions: common.CloneKeyDataPairs(c.StartPositions),
Aliases: common.CloneStringList(c.Aliases), Aliases: common.CloneStringList(c.Aliases),
Extra: common.CloneStr2Str(c.Extra), Properties: common.CloneKeyValuePairs(c.Properties),
State: c.State, State: c.State,
} }
} }
@ -99,6 +99,7 @@ func UnmarshalCollectionModel(coll *pb.CollectionInfo) *Collection {
CreateTime: coll.CreateTime, CreateTime: coll.CreateTime,
StartPositions: coll.StartPositions, StartPositions: coll.StartPositions,
State: coll.State, State: coll.State,
Properties: coll.Properties,
} }
} }
@ -134,5 +135,6 @@ func MarshalCollectionModel(coll *Collection) *pb.CollectionInfo {
ConsistencyLevel: coll.ConsistencyLevel, ConsistencyLevel: coll.ConsistencyLevel,
StartPositions: coll.StartPositions, StartPositions: coll.StartPositions,
State: coll.State, State: coll.State,
Properties: coll.Properties,
} }
} }

View File

@ -51,6 +51,12 @@ var (
PartitionCreatedTimestamp: 1, PartitionCreatedTimestamp: 1,
}, },
}, },
Properties: []*commonpb.KeyValuePair{
{
Key: "k",
Value: "v",
},
},
} }
deprecatedColPb = &pb.CollectionInfo{ deprecatedColPb = &pb.CollectionInfo{
@ -76,6 +82,12 @@ var (
ShardsNum: 1, ShardsNum: 1,
StartPositions: startPositions, StartPositions: startPositions,
ConsistencyLevel: commonpb.ConsistencyLevel_Strong, ConsistencyLevel: commonpb.ConsistencyLevel_Strong,
Properties: []*commonpb.KeyValuePair{
{
Key: "k",
Value: "v",
},
},
} }
) )

View File

@ -123,6 +123,52 @@ func (_c *DataCoord_AssignSegmentID_Call) Return(_a0 *datapb.AssignSegmentIDResp
return _c return _c
} }
// BroadCastAlteredCollection provides a mock function with given fields: ctx, req
func (_m *DataCoord) BroadCastAlteredCollection(ctx context.Context, req *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
	ret := _m.Called(ctx, req)

	var r0 *commonpb.Status
	// Prefer a registered return function; otherwise fall back to the stored value.
	if rf, ok := ret.Get(0).(func(context.Context, *milvuspb.AlterCollectionRequest) *commonpb.Status); ok {
		r0 = rf(ctx, req)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*commonpb.Status)
		}
	}

	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, *milvuspb.AlterCollectionRequest) error); ok {
		r1 = rf(ctx, req)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}
// DataCoord_BroadCastAlteredCollection_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BroadCastAlteredCollection'
type DataCoord_BroadCastAlteredCollection_Call struct {
	*mock.Call
}
// BroadCastAlteredCollection is a helper method to define mock.On call
// - ctx context.Context
// - req *milvuspb.AlterCollectionRequest
func (_e *DataCoord_Expecter) BroadCastAlteredCollection(ctx interface{}, req interface{}) *DataCoord_BroadCastAlteredCollection_Call {
	return &DataCoord_BroadCastAlteredCollection_Call{Call: _e.mock.On("BroadCastAlteredCollection", ctx, req)}
}
// Run registers a callback invoked with the typed arguments when the mocked
// BroadCastAlteredCollection is called; returns the call for chaining.
func (_c *DataCoord_BroadCastAlteredCollection_Call) Run(run func(ctx context.Context, req *milvuspb.AlterCollectionRequest)) *DataCoord_BroadCastAlteredCollection_Call {
	_c.Call.Run(func(args mock.Arguments) {
		run(args[0].(context.Context), args[1].(*milvuspb.AlterCollectionRequest))
	})
	return _c
}
// Return configures the values the mocked BroadCastAlteredCollection call
// will return; returns the call for chaining.
func (_c *DataCoord_BroadCastAlteredCollection_Call) Return(_a0 *commonpb.Status, _a1 error) *DataCoord_BroadCastAlteredCollection_Call {
	_c.Call.Return(_a0, _a1)
	return _c
}
// DropVirtualChannel provides a mock function with given fields: ctx, req // DropVirtualChannel provides a mock function with given fields: ctx, req
func (_m *DataCoord) DropVirtualChannel(ctx context.Context, req *datapb.DropVirtualChannelRequest) (*datapb.DropVirtualChannelResponse, error) { func (_m *DataCoord) DropVirtualChannel(ctx context.Context, req *datapb.DropVirtualChannelRequest) (*datapb.DropVirtualChannelResponse, error) {
ret := _m.Called(ctx, req) ret := _m.Called(ctx, req)

View File

@ -1698,6 +1698,53 @@ func (_c *RootCoord_ShowCollections_Call) Return(_a0 *milvuspb.ShowCollectionsRe
return _c return _c
} }
// AlterCollection provides a mock function with given fields: ctx, req
func (_m *RootCoord) AlterCollection(ctx context.Context, req *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
	ret := _m.Called(ctx, req)
	// Prefer a typed return function registered by the test; otherwise use
	// the statically configured return value (may be nil).
	var r0 *commonpb.Status
	if rf, ok := ret.Get(0).(func(context.Context, *milvuspb.AlterCollectionRequest) *commonpb.Status); ok {
		r0 = rf(ctx, req)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*commonpb.Status)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, *milvuspb.AlterCollectionRequest) error); ok {
		r1 = rf(ctx, req)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// RootCoord_AlterCollection_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AlterCollection'
type RootCoord_AlterCollection_Call struct {
	*mock.Call
}

// AlterCollection is a helper method to define mock.On call
// - ctx context.Context
// - req *milvuspb.AlterCollectionRequest
func (_e *RootCoord_Expecter) AlterCollection(ctx interface{}, req interface{}) *RootCoord_AlterCollection_Call {
	return &RootCoord_AlterCollection_Call{Call: _e.mock.On("AlterCollection", ctx, req)}
}
// Run registers a callback invoked with the typed arguments when the mocked
// AlterCollection is called; returns the call for chaining.
func (_c *RootCoord_AlterCollection_Call) Run(run func(ctx context.Context, req *milvuspb.AlterCollectionRequest)) *RootCoord_AlterCollection_Call {
	_c.Call.Run(func(args mock.Arguments) {
		run(args[0].(context.Context), args[1].(*milvuspb.AlterCollectionRequest))
	})
	return _c
}
// Return configures the values the mocked AlterCollection call will return;
// returns the call for chaining.
func (_c *RootCoord_AlterCollection_Call) Return(_a0 *commonpb.Status, _a1 error) *RootCoord_AlterCollection_Call {
	_c.Call.Return(_a0, _a1)
	return _c
}
// ShowConfigurations provides a mock function with given fields: ctx, req // ShowConfigurations provides a mock function with given fields: ctx, req
func (_m *RootCoord) ShowConfigurations(ctx context.Context, req *internalpb.ShowConfigurationsRequest) (*internalpb.ShowConfigurationsResponse, error) { func (_m *RootCoord) ShowConfigurations(ctx context.Context, req *internalpb.ShowConfigurationsRequest) (*internalpb.ShowConfigurationsResponse, error) {
ret := _m.Called(ctx, req) ret := _m.Called(ctx, req)

View File

@ -146,6 +146,7 @@ enum MsgType {
CreateAlias = 108; CreateAlias = 108;
DropAlias = 109; DropAlias = 109;
AlterAlias = 110; AlterAlias = 110;
AlterCollection = 111;
/* DEFINITION REQUESTS: PARTITION */ /* DEFINITION REQUESTS: PARTITION */

View File

@ -63,6 +63,8 @@ service DataCoord {
rpc SaveImportSegment(SaveImportSegmentRequest) returns(common.Status) {} rpc SaveImportSegment(SaveImportSegmentRequest) returns(common.Status) {}
rpc UnsetIsImportingState(UnsetIsImportingStateRequest) returns(common.Status) {} rpc UnsetIsImportingState(UnsetIsImportingStateRequest) returns(common.Status) {}
rpc MarkSegmentsDropped(MarkSegmentsDroppedRequest) returns(common.Status) {} rpc MarkSegmentsDropped(MarkSegmentsDroppedRequest) returns(common.Status) {}
rpc BroadCastAlteredCollection(milvus.AlterCollectionRequest) returns (common.Status) {}
} }
service DataNode { service DataNode {
@ -244,13 +246,6 @@ message SegmentMsg{
SegmentInfo segment = 2; SegmentInfo segment = 2;
} }
message CollectionInfo {
int64 ID = 1;
schema.CollectionSchema schema = 2;
repeated int64 partitions = 3;
repeated common.KeyDataPair start_positions = 4;
}
message SegmentInfo { message SegmentInfo {
int64 ID = 1; int64 ID = 1;
int64 collectionID = 2; int64 collectionID = 2;
@ -454,6 +449,7 @@ message CompactionPlan {
CompactionType type = 5; CompactionType type = 5;
uint64 timetravel = 6; uint64 timetravel = 6;
string channel = 7; string channel = 7;
int64 collection_ttl = 8;
} }
message CompactionResult { message CompactionResult {

File diff suppressed because it is too large Load Diff

View File

@ -49,7 +49,7 @@ message CollectionInfo {
repeated int64 partitionIDs = 4; repeated int64 partitionIDs = 4;
// deprecate // deprecate
repeated string partitionNames = 5; repeated string partitionNames = 5;
// deprecat // deprecate
repeated FieldIndexInfo field_indexes = 6; repeated FieldIndexInfo field_indexes = 6;
repeated string virtual_channel_names = 7; repeated string virtual_channel_names = 7;
repeated string physical_channel_names = 8; repeated string physical_channel_names = 8;
@ -59,6 +59,7 @@ message CollectionInfo {
repeated common.KeyDataPair start_positions = 11; repeated common.KeyDataPair start_positions = 11;
common.ConsistencyLevel consistency_level = 12; common.ConsistencyLevel consistency_level = 12;
CollectionState state = 13; // To keep compatible with older version, default state is `Created`. CollectionState state = 13; // To keep compatible with older version, default state is `Created`.
repeated common.KeyValuePair properties = 14;
} }
message PartitionInfo { message PartitionInfo {

View File

@ -241,7 +241,7 @@ type CollectionInfo struct {
PartitionIDs []int64 `protobuf:"varint,4,rep,packed,name=partitionIDs,proto3" json:"partitionIDs,omitempty"` PartitionIDs []int64 `protobuf:"varint,4,rep,packed,name=partitionIDs,proto3" json:"partitionIDs,omitempty"`
// deprecate // deprecate
PartitionNames []string `protobuf:"bytes,5,rep,name=partitionNames,proto3" json:"partitionNames,omitempty"` PartitionNames []string `protobuf:"bytes,5,rep,name=partitionNames,proto3" json:"partitionNames,omitempty"`
// deprecat // deprecate
FieldIndexes []*FieldIndexInfo `protobuf:"bytes,6,rep,name=field_indexes,json=fieldIndexes,proto3" json:"field_indexes,omitempty"` FieldIndexes []*FieldIndexInfo `protobuf:"bytes,6,rep,name=field_indexes,json=fieldIndexes,proto3" json:"field_indexes,omitempty"`
VirtualChannelNames []string `protobuf:"bytes,7,rep,name=virtual_channel_names,json=virtualChannelNames,proto3" json:"virtual_channel_names,omitempty"` VirtualChannelNames []string `protobuf:"bytes,7,rep,name=virtual_channel_names,json=virtualChannelNames,proto3" json:"virtual_channel_names,omitempty"`
PhysicalChannelNames []string `protobuf:"bytes,8,rep,name=physical_channel_names,json=physicalChannelNames,proto3" json:"physical_channel_names,omitempty"` PhysicalChannelNames []string `protobuf:"bytes,8,rep,name=physical_channel_names,json=physicalChannelNames,proto3" json:"physical_channel_names,omitempty"`
@ -251,6 +251,7 @@ type CollectionInfo struct {
StartPositions []*commonpb.KeyDataPair `protobuf:"bytes,11,rep,name=start_positions,json=startPositions,proto3" json:"start_positions,omitempty"` StartPositions []*commonpb.KeyDataPair `protobuf:"bytes,11,rep,name=start_positions,json=startPositions,proto3" json:"start_positions,omitempty"`
ConsistencyLevel commonpb.ConsistencyLevel `protobuf:"varint,12,opt,name=consistency_level,json=consistencyLevel,proto3,enum=milvus.proto.common.ConsistencyLevel" json:"consistency_level,omitempty"` ConsistencyLevel commonpb.ConsistencyLevel `protobuf:"varint,12,opt,name=consistency_level,json=consistencyLevel,proto3,enum=milvus.proto.common.ConsistencyLevel" json:"consistency_level,omitempty"`
State CollectionState `protobuf:"varint,13,opt,name=state,proto3,enum=milvus.proto.etcd.CollectionState" json:"state,omitempty"` State CollectionState `protobuf:"varint,13,opt,name=state,proto3,enum=milvus.proto.etcd.CollectionState" json:"state,omitempty"`
Properties []*commonpb.KeyValuePair `protobuf:"bytes,14,rep,name=properties,proto3" json:"properties,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"` XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"` XXX_sizecache int32 `json:"-"`
@ -372,6 +373,13 @@ func (m *CollectionInfo) GetState() CollectionState {
return CollectionState_CollectionCreated return CollectionState_CollectionCreated
} }
func (m *CollectionInfo) GetProperties() []*commonpb.KeyValuePair {
if m != nil {
return m.Properties
}
return nil
}
type PartitionInfo struct { type PartitionInfo struct {
PartitionID int64 `protobuf:"varint,1,opt,name=partitionID,proto3" json:"partitionID,omitempty"` PartitionID int64 `protobuf:"varint,1,opt,name=partitionID,proto3" json:"partitionID,omitempty"`
PartitionName string `protobuf:"bytes,2,opt,name=partitionName,proto3" json:"partitionName,omitempty"` PartitionName string `protobuf:"bytes,2,opt,name=partitionName,proto3" json:"partitionName,omitempty"`
@ -771,68 +779,69 @@ func init() {
func init() { proto.RegisterFile("etcd_meta.proto", fileDescriptor_975d306d62b73e88) } func init() { proto.RegisterFile("etcd_meta.proto", fileDescriptor_975d306d62b73e88) }
var fileDescriptor_975d306d62b73e88 = []byte{ var fileDescriptor_975d306d62b73e88 = []byte{
// 1000 bytes of a gzipped FileDescriptorProto // 1020 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x55, 0xcb, 0x6e, 0x23, 0x45, 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x55, 0xcb, 0x8e, 0xdc, 0x44,
0x14, 0x9d, 0x76, 0xfb, 0xd5, 0xd7, 0x8f, 0xd8, 0xc5, 0x4c, 0xd4, 0x13, 0x66, 0xa0, 0xc7, 0x30, 0x14, 0x8d, 0xdb, 0xfd, 0xf2, 0xed, 0xc7, 0x74, 0x17, 0xc9, 0xc8, 0x19, 0x12, 0x70, 0x1a, 0x02,
0xd0, 0x1a, 0x69, 0x12, 0x91, 0xf0, 0xda, 0x80, 0x80, 0xb4, 0x46, 0xb2, 0x80, 0x91, 0xd5, 0x89, 0x56, 0xa4, 0xcc, 0x88, 0x19, 0x5e, 0x1b, 0x10, 0x61, 0xac, 0x48, 0x2d, 0x20, 0x6a, 0x79, 0x46,
0xb2, 0x60, 0xd3, 0x2a, 0x77, 0x57, 0xec, 0x42, 0xfd, 0x52, 0x57, 0x39, 0x90, 0x3f, 0xe0, 0x4f, 0x59, 0xb0, 0xb1, 0xaa, 0xed, 0x9a, 0xee, 0x42, 0x7e, 0xc9, 0x55, 0x3d, 0x30, 0x7f, 0xc0, 0x9f,
0xf8, 0x04, 0xf8, 0x01, 0xbe, 0x86, 0x35, 0x7b, 0x54, 0x55, 0xfd, 0xb6, 0xc3, 0x92, 0x9d, 0xef, 0xf0, 0x09, 0x7c, 0x01, 0x5f, 0xc3, 0x9a, 0x15, 0x1b, 0x54, 0x55, 0x7e, 0x77, 0x0f, 0x62, 0xc5,
0xe9, 0xba, 0xb7, 0xee, 0xb9, 0xf7, 0xd4, 0x31, 0x1c, 0x11, 0xee, 0x07, 0x5e, 0x44, 0x38, 0x3e, 0xce, 0xf7, 0x54, 0xdd, 0x5b, 0xf7, 0xdc, 0xc7, 0x31, 0x1c, 0x11, 0xee, 0x07, 0x5e, 0x44, 0x38,
0x4d, 0xb3, 0x84, 0x27, 0x68, 0x1e, 0xd1, 0xf0, 0x6e, 0xc7, 0x54, 0x74, 0x2a, 0xbe, 0x9e, 0x8c, 0x3e, 0x4d, 0xb3, 0x84, 0x27, 0x68, 0x1e, 0xd1, 0xf0, 0x76, 0xc7, 0x94, 0x75, 0x2a, 0x4e, 0x4f,
0xfd, 0x24, 0x8a, 0x92, 0x58, 0x41, 0x27, 0x63, 0xe6, 0x6f, 0x49, 0x94, 0x1f, 0x5f, 0xfc, 0xa5, 0xc6, 0x7e, 0x12, 0x45, 0x49, 0xac, 0xa0, 0x93, 0x31, 0xf3, 0xb7, 0x24, 0xca, 0xaf, 0x2f, 0xfe,
0x81, 0xb1, 0x8c, 0x03, 0xf2, 0xeb, 0x32, 0xbe, 0x4d, 0xd0, 0x73, 0x00, 0x2a, 0x02, 0x2f, 0xc6, 0xd0, 0xc0, 0x58, 0xc6, 0x01, 0xf9, 0x65, 0x19, 0xdf, 0x24, 0xe8, 0x29, 0x00, 0x15, 0x86, 0x17,
0x11, 0x31, 0x35, 0x4b, 0xb3, 0x0d, 0xd7, 0x90, 0xc8, 0x5b, 0x1c, 0x11, 0x64, 0xc2, 0x40, 0x06, 0xe3, 0x88, 0x98, 0x9a, 0xa5, 0xd9, 0x86, 0x6b, 0x48, 0xe4, 0x0d, 0x8e, 0x08, 0x32, 0x61, 0x20,
0x4b, 0xc7, 0xec, 0x58, 0x9a, 0xad, 0xbb, 0x45, 0x88, 0x1c, 0x18, 0xab, 0xc4, 0x14, 0x67, 0x38, 0x8d, 0xa5, 0x63, 0x76, 0x2c, 0xcd, 0xd6, 0xdd, 0xc2, 0x44, 0x0e, 0x8c, 0x95, 0x63, 0x8a, 0x33,
0x62, 0xa6, 0x6e, 0xe9, 0xf6, 0xe8, 0xfc, 0xc5, 0x69, 0xa3, 0x99, 0xbc, 0x8d, 0xef, 0xc9, 0xfd, 0x1c, 0x31, 0x53, 0xb7, 0x74, 0x7b, 0x74, 0xfe, 0xec, 0xb4, 0x91, 0x4c, 0x9e, 0xc6, 0x77, 0xe4,
0x0d, 0x0e, 0x77, 0x64, 0x85, 0x69, 0xe6, 0x8e, 0x64, 0xda, 0x4a, 0x66, 0x89, 0xfa, 0x01, 0x09, 0xee, 0x2d, 0x0e, 0x77, 0x64, 0x85, 0x69, 0xe6, 0x8e, 0xa4, 0xdb, 0x4a, 0x7a, 0x89, 0xf8, 0x01,
0x09, 0x27, 0x81, 0xd9, 0xb5, 0x34, 0x7b, 0xe8, 0x16, 0x21, 0x7a, 0x1f, 0x46, 0x7e, 0x46, 0x30, 0x09, 0x09, 0x27, 0x81, 0xd9, 0xb5, 0x34, 0x7b, 0xe8, 0x16, 0x26, 0x7a, 0x1f, 0x46, 0x7e, 0x46,
0x27, 0x1e, 0xa7, 0x11, 0x31, 0x7b, 0x96, 0x66, 0x77, 0x5d, 0x50, 0xd0, 0x35, 0x8d, 0xc8, 0xc2, 0x30, 0x27, 0x1e, 0xa7, 0x11, 0x31, 0x7b, 0x96, 0x66, 0x77, 0x5d, 0x50, 0xd0, 0x35, 0x8d, 0xc8,
0x81, 0xe9, 0x1b, 0x4a, 0xc2, 0xa0, 0xe2, 0x62, 0xc2, 0xe0, 0x96, 0x86, 0x24, 0x58, 0x3a, 0x92, 0xc2, 0x81, 0xe9, 0x6b, 0x4a, 0xc2, 0xa0, 0xe2, 0x62, 0xc2, 0xe0, 0x86, 0x86, 0x24, 0x58, 0x3a,
0x88, 0xee, 0x16, 0xe1, 0xc3, 0x34, 0x16, 0x7f, 0xf6, 0x60, 0x7a, 0x99, 0x84, 0x21, 0xf1, 0x39, 0x92, 0x88, 0xee, 0x16, 0xe6, 0xfd, 0x34, 0x16, 0x7f, 0xf7, 0x60, 0x7a, 0x99, 0x84, 0x21, 0xf1,
0x4d, 0x62, 0x59, 0x66, 0x0a, 0x9d, 0xb2, 0x42, 0x67, 0xe9, 0xa0, 0xaf, 0xa0, 0xaf, 0x06, 0x28, 0x39, 0x4d, 0x62, 0x19, 0x66, 0x0a, 0x9d, 0x32, 0x42, 0x67, 0xe9, 0xa0, 0xaf, 0xa0, 0xaf, 0x0a,
0x73, 0x47, 0xe7, 0x2f, 0x9b, 0x1c, 0xf3, 0xe1, 0x56, 0x45, 0xae, 0x24, 0xe0, 0xe6, 0x49, 0x6d, 0x28, 0x7d, 0x47, 0xe7, 0xcf, 0x9b, 0x1c, 0xf3, 0xe2, 0x56, 0x41, 0xae, 0x24, 0xe0, 0xe6, 0x4e,
0x22, 0x7a, 0x9b, 0x08, 0x5a, 0xc0, 0x38, 0xc5, 0x19, 0xa7, 0xb2, 0x01, 0x87, 0x99, 0x5d, 0x4b, 0x6d, 0x22, 0x7a, 0x9b, 0x08, 0x5a, 0xc0, 0x38, 0xc5, 0x19, 0xa7, 0x32, 0x01, 0x87, 0x99, 0x5d,
0xb7, 0x75, 0xb7, 0x81, 0xa1, 0x8f, 0x60, 0x5a, 0xc6, 0x62, 0x31, 0xcc, 0xec, 0x59, 0xba, 0x6d, 0x4b, 0xb7, 0x75, 0xb7, 0x81, 0xa1, 0x8f, 0x60, 0x5a, 0xda, 0xa2, 0x31, 0xcc, 0xec, 0x59, 0xba,
0xb8, 0x2d, 0x14, 0xbd, 0x81, 0xc9, 0xad, 0x18, 0x8a, 0x27, 0xf9, 0x11, 0x66, 0xf6, 0x0f, 0xad, 0x6d, 0xb8, 0x2d, 0x14, 0xbd, 0x86, 0xc9, 0x8d, 0x28, 0x8a, 0x27, 0xf9, 0x11, 0x66, 0xf6, 0x0f,
0x45, 0x68, 0xe4, 0xb4, 0x39, 0x3c, 0x77, 0x7c, 0x5b, 0xc6, 0x84, 0xa1, 0x73, 0x78, 0x72, 0x47, 0xb5, 0x45, 0xcc, 0xc8, 0x69, 0xb3, 0x78, 0xee, 0xf8, 0xa6, 0xb4, 0x09, 0x43, 0xe7, 0xf0, 0xe8,
0x33, 0xbe, 0xc3, 0xa1, 0xe7, 0x6f, 0x71, 0x1c, 0x93, 0x50, 0x0a, 0x84, 0x99, 0x03, 0x79, 0xed, 0x96, 0x66, 0x7c, 0x87, 0x43, 0xcf, 0xdf, 0xe2, 0x38, 0x26, 0xa1, 0x1c, 0x10, 0x66, 0x0e, 0xe4,
0x3b, 0xf9, 0xc7, 0x4b, 0xf5, 0x4d, 0xdd, 0xfd, 0x29, 0x1c, 0xa7, 0xdb, 0x7b, 0x46, 0xfd, 0xbd, 0xb3, 0xef, 0xe4, 0x87, 0x97, 0xea, 0x4c, 0xbd, 0xfd, 0x29, 0x1c, 0xa7, 0xdb, 0x3b, 0x46, 0xfd,
0xa4, 0xa1, 0x4c, 0x7a, 0x5c, 0x7c, 0x6d, 0x64, 0x7d, 0x03, 0xcf, 0x4a, 0x0e, 0x9e, 0x9a, 0x4a, 0x3d, 0xa7, 0xa1, 0x74, 0x7a, 0x58, 0x9c, 0x36, 0xbc, 0xbe, 0x81, 0x27, 0x25, 0x07, 0x4f, 0x55,
0x20, 0x27, 0xc5, 0x38, 0x8e, 0x52, 0x66, 0x1a, 0x96, 0x6e, 0x77, 0xdd, 0x93, 0xf2, 0xcc, 0xa5, 0x25, 0x90, 0x95, 0x62, 0x1c, 0x47, 0x29, 0x33, 0x0d, 0x4b, 0xb7, 0xbb, 0xee, 0x49, 0x79, 0xe7,
0x3a, 0x72, 0x5d, 0x9e, 0x10, 0x12, 0x66, 0x5b, 0x9c, 0x05, 0xcc, 0x8b, 0x77, 0x91, 0x09, 0x96, 0x52, 0x5d, 0xb9, 0x2e, 0x6f, 0x88, 0x11, 0x66, 0x5b, 0x9c, 0x05, 0xcc, 0x8b, 0x77, 0x91, 0x09,
0x66, 0xf7, 0x5c, 0x43, 0x21, 0x6f, 0x77, 0x11, 0x5a, 0xc2, 0x11, 0xe3, 0x38, 0xe3, 0x5e, 0x9a, 0x96, 0x66, 0xf7, 0x5c, 0x43, 0x21, 0x6f, 0x76, 0x11, 0x5a, 0xc2, 0x11, 0xe3, 0x38, 0xe3, 0x5e,
0x30, 0x59, 0x81, 0x99, 0x23, 0x39, 0x14, 0xeb, 0x21, 0xad, 0x3a, 0x98, 0x63, 0x29, 0xd5, 0xa9, 0x9a, 0x30, 0x19, 0x81, 0x99, 0x23, 0x59, 0x14, 0xeb, 0xbe, 0x59, 0x75, 0x30, 0xc7, 0x72, 0x54,
0x4c, 0x5c, 0x15, 0x79, 0xc8, 0x85, 0xb9, 0x9f, 0xc4, 0x8c, 0x32, 0x4e, 0x62, 0xff, 0xde, 0x0b, 0xa7, 0xd2, 0x71, 0x55, 0xf8, 0x21, 0x17, 0xe6, 0x7e, 0x12, 0x33, 0xca, 0x38, 0x89, 0xfd, 0x3b,
0xc9, 0x1d, 0x09, 0xcd, 0xb1, 0xa5, 0xd9, 0xd3, 0xb6, 0x28, 0xf2, 0x62, 0x97, 0xd5, 0xe9, 0x1f, 0x2f, 0x24, 0xb7, 0x24, 0x34, 0xc7, 0x96, 0x66, 0x4f, 0xdb, 0x43, 0x91, 0x07, 0xbb, 0xac, 0x6e,
0xc4, 0x61, 0x77, 0xe6, 0xb7, 0x10, 0xf4, 0x25, 0xf4, 0x18, 0xc7, 0x9c, 0x98, 0x13, 0x59, 0x67, 0x7f, 0x2f, 0x2e, 0xbb, 0x33, 0xbf, 0x85, 0xa0, 0x2f, 0xa1, 0xc7, 0x38, 0xe6, 0xc4, 0x9c, 0xc8,
0x71, 0x60, 0x53, 0x35, 0x69, 0x89, 0x93, 0xae, 0x4a, 0x58, 0xfc, 0xa3, 0xc1, 0x64, 0x55, 0x8a, 0x38, 0x8b, 0x03, 0x9d, 0xaa, 0x8d, 0x96, 0xb8, 0xe9, 0x2a, 0x07, 0xf4, 0x0a, 0x20, 0xcd, 0x92,
0x44, 0x28, 0xd7, 0x82, 0x51, 0x4d, 0x35, 0xb9, 0x84, 0xeb, 0x10, 0xfa, 0x10, 0x26, 0x0d, 0xc5, 0x94, 0x64, 0x9c, 0x12, 0x66, 0x4e, 0xff, 0xeb, 0xfe, 0xd5, 0x9c, 0x16, 0x7f, 0x69, 0x30, 0x59,
0x48, 0x49, 0x1b, 0x6e, 0x13, 0x44, 0x5f, 0xc3, 0xbb, 0xff, 0xb1, 0x93, 0x5c, 0xc2, 0x4f, 0x1f, 0x95, 0x73, 0x26, 0x86, 0xdf, 0x82, 0x51, 0x6d, 0xf0, 0xf2, 0x2d, 0xa8, 0x43, 0xe8, 0x43, 0x98,
0x5c, 0x09, 0xfa, 0x00, 0x26, 0x7e, 0xd9, 0xb3, 0x47, 0xd5, 0xdb, 0xd6, 0xdd, 0x71, 0x05, 0x2e, 0x34, 0x86, 0x4e, 0x6e, 0x85, 0xe1, 0x36, 0x41, 0xf4, 0x35, 0xbc, 0xfb, 0x2f, 0x6d, 0xcd, 0xb7,
0x03, 0xf4, 0x45, 0x41, 0xbc, 0x27, 0x89, 0x1f, 0x92, 0x68, 0xc9, 0xae, 0xc1, 0xfb, 0x77, 0x0d, 0xe0, 0xf1, 0xbd, 0x5d, 0x45, 0x1f, 0xc0, 0xc4, 0x2f, 0x69, 0x7b, 0x54, 0xc9, 0x83, 0xee, 0x8e,
0x8c, 0x6f, 0x43, 0x8a, 0x59, 0x61, 0x60, 0x58, 0x04, 0x0d, 0x03, 0x93, 0x88, 0xa4, 0xb2, 0xd7, 0x2b, 0x70, 0x19, 0xa0, 0x2f, 0x8a, 0xda, 0xf5, 0x64, 0xed, 0x0e, 0x4d, 0x79, 0xc9, 0xae, 0x5e,
0x4a, 0xe7, 0x40, 0x2b, 0x2f, 0x60, 0x5c, 0x67, 0x99, 0x13, 0xcc, 0x9f, 0xad, 0xe4, 0x85, 0x2e, 0xba, 0xc5, 0x6f, 0x1a, 0x18, 0xaf, 0x42, 0x8a, 0x59, 0xa1, 0x81, 0x58, 0x18, 0x0d, 0x0d, 0x94,
0x8a, 0x6e, 0xbb, 0xb2, 0xdb, 0xe7, 0x07, 0xba, 0x95, 0x3d, 0x35, 0x3a, 0xfd, 0xad, 0x03, 0xb3, 0x88, 0xa4, 0xb2, 0x97, 0x4a, 0xe7, 0x40, 0x2a, 0xcf, 0x60, 0x5c, 0x67, 0x99, 0x13, 0xcc, 0x37,
0x2b, 0xb2, 0x89, 0x48, 0xcc, 0x2b, 0x97, 0x5a, 0x40, 0xfd, 0xf2, 0x62, 0x4b, 0x0d, 0xac, 0xbd, 0x5f, 0xf2, 0x42, 0x17, 0x45, 0xb6, 0x5d, 0x99, 0xed, 0xd3, 0x03, 0xd9, 0xca, 0x9c, 0x1a, 0x99,
0xc8, 0xce, 0xfe, 0x22, 0x9f, 0x81, 0xc1, 0xf2, 0xca, 0x8e, 0xec, 0x57, 0x77, 0x2b, 0x40, 0x39, 0xfe, 0xda, 0x81, 0xd9, 0x15, 0xd9, 0x44, 0x24, 0xe6, 0x95, 0xd0, 0x2d, 0xa0, 0xfe, 0x78, 0xd1,
0xa1, 0x78, 0xce, 0x4e, 0x3e, 0xfa, 0x22, 0xac, 0x3b, 0x61, 0xaf, 0x69, 0xe8, 0x26, 0x0c, 0xd6, 0xa5, 0x06, 0xd6, 0x6e, 0x64, 0x67, 0xbf, 0x91, 0x4f, 0xc0, 0x60, 0x79, 0x64, 0x47, 0xe6, 0xab,
0x3b, 0x2a, 0x73, 0xfa, 0xea, 0x4b, 0x1e, 0x8a, 0xf1, 0x90, 0x18, 0xaf, 0x43, 0xa2, 0x5c, 0xc5, 0xbb, 0x15, 0xa0, 0xc4, 0x54, 0x28, 0x82, 0x93, 0x97, 0xbe, 0x30, 0xeb, 0x62, 0xda, 0x6b, 0xfe,
0x1c, 0x48, 0xa7, 0x1e, 0x29, 0x4c, 0x12, 0x6b, 0x9b, 0xdc, 0x70, 0xcf, 0xad, 0xff, 0xd6, 0xea, 0x13, 0x4c, 0x18, 0xac, 0x77, 0x54, 0xfa, 0xf4, 0xd5, 0x49, 0x6e, 0x8a, 0xf2, 0x90, 0x18, 0xaf,
0x3e, 0xfb, 0x23, 0xe1, 0xf8, 0x7f, 0xf7, 0xd9, 0xf7, 0x00, 0xca, 0x09, 0x15, 0x2e, 0x5b, 0x43, 0x43, 0xa2, 0x84, 0xc9, 0x1c, 0x48, 0xb1, 0x1f, 0x29, 0x4c, 0x12, 0x6b, 0xeb, 0xe4, 0x70, 0x4f,
0xd0, 0xcb, 0x9a, 0xc7, 0x7a, 0x1c, 0x6f, 0x0a, 0x8f, 0xad, 0x1e, 0xc7, 0x35, 0xde, 0xb0, 0x3d, 0xf0, 0xff, 0xd4, 0xea, 0x52, 0xfd, 0x03, 0xe1, 0xf8, 0x7f, 0x97, 0xea, 0xf7, 0x00, 0xca, 0x0a,
0xbb, 0xee, 0xef, 0xdb, 0xf5, 0xe2, 0x0f, 0xc1, 0x36, 0x23, 0x01, 0x89, 0x39, 0xc5, 0xa1, 0x5c, 0x15, 0x42, 0x5d, 0x43, 0xd0, 0xf3, 0x9a, 0x4c, 0x7b, 0x1c, 0x6f, 0x0a, 0x99, 0xae, 0x96, 0xe3,
0xfb, 0x09, 0x0c, 0x77, 0x8c, 0x64, 0x35, 0x95, 0x96, 0x31, 0x7a, 0x0d, 0x88, 0xc4, 0x7e, 0x76, 0x1a, 0x6f, 0xd8, 0x9e, 0xe2, 0xf7, 0xf7, 0x15, 0x7f, 0xf1, 0xbb, 0x60, 0x9b, 0x91, 0x80, 0xc4,
0x9f, 0x0a, 0x05, 0xa6, 0x98, 0xb1, 0x5f, 0x92, 0x2c, 0xc8, 0x9f, 0xe6, 0xbc, 0xfc, 0xb2, 0xca, 0x9c, 0xe2, 0x50, 0xb6, 0xfd, 0x04, 0x86, 0x3b, 0x46, 0xb2, 0xda, 0x94, 0x96, 0x36, 0x7a, 0x09,
0x3f, 0xa0, 0x63, 0xe8, 0x73, 0x12, 0xe3, 0x98, 0x4b, 0x92, 0x86, 0x9b, 0x47, 0xe8, 0x29, 0x0c, 0x88, 0xc4, 0x7e, 0x76, 0x97, 0x8a, 0x09, 0x4c, 0x31, 0x63, 0x3f, 0x27, 0x59, 0x90, 0xaf, 0xe6,
0x29, 0xf3, 0xd8, 0x2e, 0x25, 0x59, 0xf1, 0x6f, 0x4a, 0xd9, 0x95, 0x08, 0xd1, 0xc7, 0x70, 0xc4, 0xbc, 0x3c, 0x59, 0xe5, 0x07, 0xe8, 0x18, 0xfa, 0x9c, 0xc4, 0x38, 0xe6, 0x92, 0xa4, 0xe1, 0xe6,
0xb6, 0xf8, 0xfc, 0xb3, 0xcf, 0xab, 0xf2, 0x3d, 0x99, 0x3b, 0x55, 0x70, 0x51, 0xfb, 0x55, 0x02, 0x16, 0x7a, 0x0c, 0x43, 0xca, 0x3c, 0xb6, 0x4b, 0x49, 0x56, 0xfc, 0x90, 0x29, 0xbb, 0x12, 0x26,
0x47, 0x2d, 0xbb, 0x41, 0x4f, 0x60, 0x5e, 0x41, 0xf9, 0x5b, 0x9f, 0x3d, 0x42, 0xc7, 0x80, 0x5a, 0xfa, 0x18, 0x8e, 0xd8, 0x16, 0x9f, 0x7f, 0xf6, 0x79, 0x15, 0xbe, 0x27, 0x7d, 0xa7, 0x0a, 0x2e,
0x30, 0x8d, 0x37, 0x33, 0xad, 0x89, 0x3b, 0x59, 0x92, 0xa6, 0x02, 0xef, 0x34, 0xcb, 0x48, 0x9c, 0x62, 0xbf, 0x48, 0xe0, 0xa8, 0xa5, 0x58, 0xe8, 0x11, 0xcc, 0x2b, 0x28, 0xdf, 0xf5, 0xd9, 0x03,
0x04, 0x33, 0xfd, 0xd5, 0xcf, 0x30, 0x6d, 0x3e, 0x73, 0xf4, 0x18, 0x66, 0xab, 0x96, 0xb5, 0xcc, 0x74, 0x0c, 0xa8, 0x05, 0xd3, 0x78, 0x33, 0xd3, 0x9a, 0xb8, 0x93, 0x25, 0x69, 0x2a, 0xf0, 0x4e,
0x1e, 0x89, 0xf4, 0x26, 0xaa, 0x6e, 0xab, 0xc3, 0xb5, 0xcb, 0xea, 0x35, 0xaa, 0xbb, 0x6e, 0x00, 0x33, 0x8c, 0xc4, 0x49, 0x30, 0xd3, 0x5f, 0xfc, 0x04, 0xd3, 0xe6, 0x9a, 0xa3, 0x87, 0x30, 0x5b,
0xaa, 0x47, 0x8a, 0x66, 0x30, 0x96, 0x51, 0x75, 0xc7, 0x1c, 0x26, 0x15, 0xa2, 0xea, 0x17, 0x50, 0xb5, 0xa4, 0x65, 0xf6, 0x40, 0xb8, 0x37, 0x51, 0xf5, 0x5a, 0x1d, 0xae, 0x3d, 0x56, 0x8f, 0x51,
0xad, 0x76, 0x91, 0x57, 0xd6, 0xfd, 0xee, 0xe2, 0xa7, 0x4f, 0x36, 0x94, 0x6f, 0x77, 0x6b, 0xe1, 0xbd, 0xf5, 0x16, 0xa0, 0x5a, 0x52, 0x34, 0x83, 0xb1, 0xb4, 0xaa, 0x37, 0xe6, 0x30, 0xa9, 0x10,
0xfb, 0x67, 0x4a, 0xb5, 0xaf, 0x69, 0x92, 0xff, 0x3a, 0xa3, 0x31, 0x17, 0x8b, 0x0e, 0xcf, 0xa4, 0x15, 0xbf, 0x80, 0x6a, 0xb1, 0x0b, 0xbf, 0x32, 0xee, 0xb7, 0x17, 0x3f, 0x7e, 0xb2, 0xa1, 0x7c,
0x90, 0xcf, 0x84, 0x59, 0xa4, 0xeb, 0x75, 0x5f, 0x46, 0x17, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0xbb, 0x5b, 0x0b, 0xcd, 0x3e, 0x53, 0x53, 0xfb, 0x92, 0x26, 0xf9, 0xd7, 0x19, 0x8d, 0xb9, 0x68,
0x43, 0x11, 0x13, 0x31, 0xd0, 0x09, 0x00, 0x00, 0x74, 0x78, 0x26, 0x07, 0xf9, 0x4c, 0x88, 0x45, 0xba, 0x5e, 0xf7, 0xa5, 0x75, 0xf1, 0x4f, 0x00,
0x00, 0x00, 0xff, 0xff, 0xf9, 0x76, 0x1c, 0x4f, 0x13, 0x0a, 0x00, 0x00,
} }

View File

@ -21,6 +21,7 @@ service MilvusService {
rpc DescribeCollection(DescribeCollectionRequest) returns (DescribeCollectionResponse) {} rpc DescribeCollection(DescribeCollectionRequest) returns (DescribeCollectionResponse) {}
rpc GetCollectionStatistics(GetCollectionStatisticsRequest) returns (GetCollectionStatisticsResponse) {} rpc GetCollectionStatistics(GetCollectionStatisticsRequest) returns (GetCollectionStatisticsResponse) {}
rpc ShowCollections(ShowCollectionsRequest) returns (ShowCollectionsResponse) {} rpc ShowCollections(ShowCollectionsRequest) returns (ShowCollectionsResponse) {}
rpc AlterCollection(AlterCollectionRequest) returns (common.Status) {}
rpc CreatePartition(CreatePartitionRequest) returns (common.Status) {} rpc CreatePartition(CreatePartitionRequest) returns (common.Status) {}
rpc DropPartition(DropPartitionRequest) returns (common.Status) {} rpc DropPartition(DropPartitionRequest) returns (common.Status) {}
@ -133,6 +134,7 @@ message CreateCollectionRequest {
int32 shards_num = 5; int32 shards_num = 5;
// The consistency level that the collection used, modification is not supported now. // The consistency level that the collection used, modification is not supported now.
common.ConsistencyLevel consistency_level = 6; common.ConsistencyLevel consistency_level = 6;
repeated common.KeyValuePair properties = 7;
} }
/** /**
@ -152,6 +154,25 @@ message DropCollectionRequest {
string collection_name = 3; string collection_name = 3;
} }
/**
 * Alter collection in milvus
 */
message AlterCollectionRequest {
  // NOTE(review): access control reuses PrivilegeCreateCollection here —
  // confirm a dedicated alter-collection privilege isn't intended.
  option (common.privilege_ext_obj) = {
    object_type: Global
    object_privilege: PrivilegeCreateCollection
    object_name_index: -1
  };
  // Not useful for now
  common.MsgBase base = 1;
  // Not useful for now
  string db_name = 2;
  // The unique collection name in milvus.(Required)
  string collection_name = 3;
  // ID of the collection to alter.
  int64 collectionID = 4;
  // Key/value properties to set on the collection (e.g. collection-level TTL).
  repeated common.KeyValuePair properties = 5;
}
/** /**
* Check collection exist in milvus or not. * Check collection exist in milvus or not.
*/ */
@ -226,6 +247,7 @@ message DescribeCollectionResponse {
common.ConsistencyLevel consistency_level = 11; common.ConsistencyLevel consistency_level = 11;
// The collection name // The collection name
string collection_name = 12; string collection_name = 12;
repeated common.KeyValuePair properties = 13;
} }
/** /**

View File

@ -61,11 +61,13 @@ service RootCoord {
*/ */
rpc ShowCollections(milvus.ShowCollectionsRequest) returns (milvus.ShowCollectionsResponse) {} rpc ShowCollections(milvus.ShowCollectionsRequest) returns (milvus.ShowCollectionsResponse) {}
/** rpc AlterCollection(milvus.AlterCollectionRequest) returns (common.Status) {}
* @brief This method is used to create partition
* /**
* @return Status * @brief This method is used to create partition
*/ *
* @return Status
*/
rpc CreatePartition(milvus.CreatePartitionRequest) returns (common.Status) {} rpc CreatePartition(milvus.CreatePartitionRequest) returns (common.Status) {}
/** /**

View File

@ -674,101 +674,102 @@ func init() {
func init() { proto.RegisterFile("root_coord.proto", fileDescriptor_4513485a144f6b06) } func init() { proto.RegisterFile("root_coord.proto", fileDescriptor_4513485a144f6b06) }
var fileDescriptor_4513485a144f6b06 = []byte{ var fileDescriptor_4513485a144f6b06 = []byte{
// 1498 bytes of a gzipped FileDescriptorProto // 1515 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x58, 0x5b, 0x73, 0x13, 0x37, 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x58, 0xdb, 0x72, 0xdb, 0x36,
0x14, 0xc6, 0x36, 0x49, 0xec, 0x63, 0xc7, 0x0e, 0x1a, 0x2e, 0xae, 0xa1, 0xd4, 0x18, 0x5a, 0xcc, 0x13, 0x8e, 0xa4, 0xd8, 0x96, 0x56, 0xb2, 0xe4, 0x60, 0x72, 0xd0, 0xaf, 0xe4, 0xff, 0xa3, 0x28,
0xcd, 0xa1, 0x61, 0x86, 0x52, 0xde, 0x88, 0xcd, 0x04, 0x4f, 0x9b, 0x21, 0x5d, 0x43, 0x87, 0x5e, 0xf9, 0x1b, 0xe5, 0x24, 0xa7, 0xce, 0x4c, 0x9a, 0xe6, 0x2e, 0x96, 0x32, 0x8e, 0xa6, 0xf5, 0xc4,
0x18, 0x57, 0xde, 0x15, 0x8e, 0x26, 0xeb, 0x95, 0x59, 0xc9, 0xb9, 0x4c, 0x9f, 0x3a, 0xd3, 0xf7, 0xa5, 0x92, 0x4e, 0x7a, 0xf0, 0xa8, 0x10, 0x89, 0xc8, 0x1c, 0x53, 0x84, 0x02, 0x40, 0x3e, 0x4c,
0xfe, 0xa1, 0x3e, 0xb5, 0x3f, 0xa5, 0x7f, 0xa4, 0xa3, 0xd5, 0xae, 0xbc, 0xbb, 0xde, 0x75, 0x36, 0xaf, 0x3a, 0xd3, 0xfb, 0x3e, 0x40, 0xdf, 0xa6, 0x7d, 0x94, 0xbe, 0x48, 0x07, 0x04, 0x09, 0x91,
0xc0, 0x9b, 0xa5, 0xfd, 0xf4, 0x7d, 0x47, 0x9f, 0x74, 0x74, 0x24, 0xc3, 0x86, 0xcb, 0x98, 0x18, 0x14, 0x29, 0xd3, 0x49, 0xee, 0x04, 0xf0, 0xc3, 0xf7, 0x2d, 0x76, 0xb1, 0xbb, 0x80, 0x60, 0x83,
0x9a, 0x8c, 0xb9, 0x56, 0x67, 0xea, 0x32, 0xc1, 0xd0, 0xe5, 0x09, 0xb5, 0x0f, 0x67, 0x5c, 0xb5, 0x51, 0x2a, 0x86, 0x26, 0xa5, 0xcc, 0xea, 0x4c, 0x19, 0x15, 0x14, 0x5d, 0x9d, 0xd8, 0xce, 0xd1,
0x3a, 0xf2, 0xb3, 0xf7, 0xb5, 0x51, 0x31, 0xd9, 0x64, 0xc2, 0x1c, 0xd5, 0xdf, 0xa8, 0x84, 0x51, 0x8c, 0xab, 0x51, 0x47, 0x7e, 0xf6, 0xbe, 0x36, 0x2a, 0x26, 0x9d, 0x4c, 0xa8, 0xab, 0xe6, 0x1b,
0x8d, 0x2a, 0x75, 0x04, 0x71, 0x1d, 0x6c, 0xfb, 0xed, 0xf2, 0xd4, 0x65, 0xc7, 0x27, 0x7e, 0xa3, 0x95, 0x30, 0xaa, 0x51, 0xb5, 0x5d, 0x41, 0x98, 0x8b, 0x1d, 0x7f, 0x5c, 0x9e, 0x32, 0x7a, 0x72,
0x46, 0x84, 0x69, 0x0d, 0x27, 0x44, 0x60, 0xd5, 0xd1, 0x1a, 0xc2, 0xa5, 0x67, 0xb6, 0xcd, 0xcc, 0xea, 0x0f, 0x6a, 0x44, 0x98, 0xd6, 0x70, 0x42, 0x04, 0x56, 0x13, 0xad, 0x21, 0x5c, 0x79, 0xe1,
0x57, 0x74, 0x42, 0xb8, 0xc0, 0x93, 0xa9, 0x41, 0xde, 0xcf, 0x08, 0x17, 0xe8, 0x21, 0x9c, 0x1f, 0x38, 0xd4, 0x7c, 0x63, 0x4f, 0x08, 0x17, 0x78, 0x32, 0x35, 0xc8, 0x87, 0x19, 0xe1, 0x02, 0x3d,
0x61, 0x4e, 0xea, 0xb9, 0x66, 0xae, 0x5d, 0xde, 0xba, 0xd6, 0x89, 0x44, 0xe2, 0xcb, 0xef, 0xf2, 0x86, 0x8b, 0x23, 0xcc, 0x49, 0x3d, 0xd7, 0xcc, 0xb5, 0xcb, 0x5b, 0x37, 0x3a, 0x11, 0x4b, 0x7c,
0xf1, 0x36, 0xe6, 0xc4, 0xf0, 0x90, 0xe8, 0x22, 0xac, 0x98, 0x6c, 0xe6, 0x88, 0x7a, 0xa1, 0x99, 0xf9, 0x5d, 0x3e, 0xde, 0xc6, 0x9c, 0x18, 0x1e, 0x12, 0x5d, 0x86, 0x15, 0x93, 0xce, 0x5c, 0x51,
0x6b, 0xaf, 0x1b, 0xaa, 0xd1, 0xfa, 0x23, 0x07, 0x97, 0xe3, 0x0a, 0x7c, 0xca, 0x1c, 0x4e, 0xd0, 0x2f, 0x34, 0x73, 0xed, 0x75, 0x43, 0x0d, 0x5a, 0xbf, 0xe5, 0xe0, 0x6a, 0x5c, 0x81, 0x4f, 0xa9,
0x23, 0x58, 0xe5, 0x02, 0x8b, 0x19, 0xf7, 0x45, 0xae, 0x26, 0x8a, 0x0c, 0x3c, 0x88, 0xe1, 0x43, 0xcb, 0x09, 0x7a, 0x02, 0xab, 0x5c, 0x60, 0x31, 0xe3, 0xbe, 0xc8, 0xf5, 0x44, 0x91, 0x81, 0x07,
0xd1, 0x35, 0x28, 0x89, 0x80, 0xa9, 0x9e, 0x6f, 0xe6, 0xda, 0xe7, 0x8d, 0x79, 0x47, 0x4a, 0x0c, 0x31, 0x7c, 0x28, 0xba, 0x01, 0x25, 0x11, 0x30, 0xd5, 0xf3, 0xcd, 0x5c, 0xfb, 0xa2, 0x31, 0x9f,
0x6f, 0xa0, 0xea, 0x85, 0xd0, 0xef, 0x7d, 0x82, 0xd9, 0xe5, 0xc3, 0xcc, 0x36, 0xd4, 0x34, 0xf3, 0x48, 0xb1, 0xe1, 0x1d, 0x54, 0x3d, 0x13, 0xfa, 0xbd, 0xcf, 0xb0, 0xbb, 0x7c, 0x98, 0xd9, 0x81,
0xc7, 0xcc, 0xaa, 0x0a, 0xf9, 0x7e, 0xcf, 0xa3, 0x2e, 0x18, 0xf9, 0x7e, 0x2f, 0x65, 0x1e, 0xff, 0x9a, 0x66, 0xfe, 0x94, 0x5d, 0x55, 0x21, 0xdf, 0xef, 0x79, 0xd4, 0x05, 0x23, 0xdf, 0xef, 0xa5,
0xe4, 0xa1, 0xd2, 0x9f, 0x4c, 0x99, 0x2b, 0x0c, 0xc2, 0x67, 0xb6, 0xf8, 0x30, 0xad, 0x2b, 0xb0, 0xec, 0xe3, 0xaf, 0x3c, 0x54, 0xfa, 0x93, 0x29, 0x65, 0xc2, 0x20, 0x7c, 0xe6, 0x88, 0x8f, 0xd3,
0x26, 0x30, 0x3f, 0x18, 0x52, 0xcb, 0x17, 0x5c, 0x95, 0xcd, 0xbe, 0x85, 0xbe, 0x80, 0xb2, 0x85, 0xba, 0x06, 0x6b, 0x02, 0xf3, 0xc3, 0xa1, 0x6d, 0xf9, 0x82, 0xab, 0x72, 0xd8, 0xb7, 0xd0, 0x4d,
0x05, 0x76, 0x98, 0x45, 0xe4, 0xc7, 0x82, 0xf7, 0x11, 0x82, 0xae, 0xbe, 0x85, 0x1e, 0xc3, 0x8a, 0x28, 0x5b, 0x58, 0x60, 0x97, 0x5a, 0x44, 0x7e, 0x2c, 0x78, 0x1f, 0x21, 0x98, 0xea, 0x5b, 0xe8,
0xe4, 0x20, 0xf5, 0xf3, 0xcd, 0x5c, 0xbb, 0xba, 0xd5, 0x4c, 0x54, 0x53, 0x01, 0x4a, 0x4d, 0x62, 0x29, 0xac, 0x48, 0x0e, 0x52, 0xbf, 0xd8, 0xcc, 0xb5, 0xab, 0x5b, 0xcd, 0x44, 0x35, 0x65, 0xa0,
0x28, 0x38, 0x6a, 0x40, 0x91, 0x93, 0xf1, 0x84, 0x38, 0x82, 0xd7, 0x57, 0x9a, 0x85, 0x76, 0xc1, 0xd4, 0x24, 0x86, 0x82, 0xa3, 0x06, 0x14, 0x39, 0x19, 0x4f, 0x88, 0x2b, 0x78, 0x7d, 0xa5, 0x59,
0xd0, 0x6d, 0xf4, 0x19, 0x14, 0xf1, 0x4c, 0xb0, 0x21, 0xb5, 0x78, 0x7d, 0xd5, 0xfb, 0xb6, 0x26, 0x68, 0x17, 0x0c, 0x3d, 0x46, 0xff, 0x81, 0x22, 0x9e, 0x09, 0x3a, 0xb4, 0x2d, 0x5e, 0x5f, 0xf5,
0xdb, 0x7d, 0x8b, 0xa3, 0xab, 0x50, 0x72, 0xd9, 0xd1, 0x50, 0x19, 0xb1, 0xe6, 0x45, 0x53, 0x74, 0xbe, 0xad, 0xc9, 0x71, 0xdf, 0xe2, 0xe8, 0x3a, 0x94, 0x18, 0x3d, 0x1e, 0x2a, 0x47, 0xac, 0x79,
0xd9, 0x51, 0x57, 0xb6, 0xd1, 0x37, 0xb0, 0x42, 0x9d, 0x77, 0x8c, 0xd7, 0x8b, 0xcd, 0x42, 0xbb, 0xd6, 0x14, 0x19, 0x3d, 0xee, 0xca, 0x31, 0xfa, 0x0a, 0x56, 0x6c, 0xf7, 0x3d, 0xe5, 0xf5, 0x62,
0xbc, 0x75, 0x23, 0x31, 0x96, 0xef, 0xc8, 0xc9, 0x8f, 0xd8, 0x9e, 0x91, 0x3d, 0x4c, 0x5d, 0x43, 0xb3, 0xd0, 0x2e, 0x6f, 0xdd, 0x4a, 0xb4, 0xe5, 0x1b, 0x72, 0xfa, 0x3d, 0x76, 0x66, 0x64, 0x0f,
0xe1, 0x5b, 0x7f, 0xe5, 0xe0, 0x4a, 0x8f, 0x70, 0xd3, 0xa5, 0x23, 0x32, 0xf0, 0xa3, 0xf8, 0xf0, 0xdb, 0xcc, 0x50, 0xf8, 0xd6, 0x1f, 0x39, 0xb8, 0xd6, 0x23, 0xdc, 0x64, 0xf6, 0x88, 0x0c, 0x7c,
0x6d, 0xd1, 0x82, 0x8a, 0xc9, 0x6c, 0x9b, 0x98, 0x82, 0x32, 0x47, 0x2f, 0x61, 0xa4, 0x0f, 0x5d, 0x2b, 0x3e, 0xfe, 0x58, 0xb4, 0xa0, 0x62, 0x52, 0xc7, 0x21, 0xa6, 0xb0, 0xa9, 0xab, 0x43, 0x18,
0x07, 0xf0, 0xa7, 0xdb, 0xef, 0xf1, 0x7a, 0xc1, 0x9b, 0x64, 0xa8, 0xa7, 0x35, 0x83, 0x9a, 0x1f, 0x99, 0x43, 0xff, 0x03, 0xf0, 0xb7, 0xdb, 0xef, 0xf1, 0x7a, 0xc1, 0xdb, 0x64, 0x68, 0xa6, 0x35,
0x88, 0x24, 0xee, 0x3b, 0xef, 0xd8, 0x02, 0x6d, 0x2e, 0x81, 0xb6, 0x09, 0xe5, 0x29, 0x76, 0x05, 0x83, 0x9a, 0x6f, 0x88, 0x24, 0xee, 0xbb, 0xef, 0xe9, 0x02, 0x6d, 0x2e, 0x81, 0xb6, 0x09, 0xe5,
0x8d, 0x28, 0x87, 0xbb, 0x64, 0xae, 0x68, 0x19, 0x7f, 0x39, 0xe7, 0x1d, 0xad, 0xff, 0xf2, 0x50, 0x29, 0x66, 0xc2, 0x8e, 0x28, 0x87, 0xa7, 0x64, 0xae, 0x68, 0x19, 0x3f, 0x9c, 0xf3, 0x89, 0xd6,
0xf1, 0x75, 0xa5, 0x26, 0x47, 0x3d, 0x28, 0xc9, 0x39, 0x0d, 0xa5, 0x4f, 0xbe, 0x05, 0xb7, 0x3b, 0x3f, 0x79, 0xa8, 0xf8, 0xba, 0x52, 0x93, 0xa3, 0x1e, 0x94, 0xe4, 0x9e, 0x86, 0xd2, 0x4f, 0xbe,
0xc9, 0x27, 0x50, 0x27, 0x16, 0xb0, 0x51, 0x1c, 0x05, 0xa1, 0xf7, 0xa0, 0x4c, 0x1d, 0x8b, 0x1c, 0x0b, 0xee, 0x76, 0x92, 0x2b, 0x50, 0x27, 0x66, 0xb0, 0x51, 0x1c, 0x05, 0xa6, 0xf7, 0xa0, 0x6c,
0x0f, 0xd5, 0xf2, 0xe4, 0xbd, 0xe5, 0xb9, 0x19, 0xe5, 0x91, 0xa7, 0x50, 0x47, 0x6b, 0x5b, 0xe4, 0xbb, 0x16, 0x39, 0x19, 0xaa, 0xf0, 0xe4, 0xbd, 0xf0, 0xdc, 0x8e, 0xf2, 0xc8, 0x2a, 0xd4, 0xd1,
0xd8, 0xe3, 0x00, 0x1a, 0xfc, 0xe4, 0x88, 0xc0, 0x05, 0x72, 0x2c, 0x5c, 0x3c, 0x0c, 0x73, 0x15, 0xda, 0x16, 0x39, 0xf1, 0x38, 0xc0, 0x0e, 0x7e, 0x72, 0x44, 0xe0, 0x12, 0x39, 0x11, 0x0c, 0x0f,
0x3c, 0xae, 0x6f, 0x4f, 0x89, 0xc9, 0x23, 0xe8, 0x3c, 0x97, 0xa3, 0x35, 0x37, 0x7f, 0xee, 0x08, 0xc3, 0x5c, 0x05, 0x8f, 0xeb, 0xeb, 0x33, 0x6c, 0xf2, 0x08, 0x3a, 0x2f, 0xe5, 0x6a, 0xcd, 0xcd,
0xf7, 0xc4, 0xa8, 0x91, 0x68, 0x6f, 0xe3, 0x37, 0xb8, 0x98, 0x04, 0x44, 0x1b, 0x50, 0x38, 0x20, 0x5f, 0xba, 0x82, 0x9d, 0x1a, 0x35, 0x12, 0x9d, 0x6d, 0xfc, 0x02, 0x97, 0x93, 0x80, 0x68, 0x03,
0x27, 0xbe, 0xed, 0xf2, 0x27, 0xda, 0x82, 0x95, 0x43, 0xb9, 0x95, 0x3c, 0x9f, 0x17, 0xf6, 0x86, 0x0a, 0x87, 0xe4, 0xd4, 0x77, 0xbb, 0xfc, 0x89, 0xb6, 0x60, 0xe5, 0x48, 0x1e, 0x25, 0xcf, 0xcf,
0x37, 0xa1, 0xf9, 0x4c, 0x14, 0xf4, 0x69, 0xfe, 0x49, 0xae, 0xf5, 0x6f, 0x1e, 0xea, 0x8b, 0xdb, 0x0b, 0x67, 0xc3, 0xdb, 0xd0, 0x7c, 0x27, 0x0a, 0xfa, 0x3c, 0xff, 0x2c, 0xd7, 0xfa, 0x3b, 0x0f,
0xed, 0x63, 0xce, 0x8a, 0x2c, 0x5b, 0x6e, 0x0c, 0xeb, 0xfe, 0x42, 0x47, 0xac, 0xdb, 0x4e, 0xb3, 0xf5, 0xc5, 0xe3, 0xf6, 0x29, 0xb5, 0x22, 0xcb, 0x91, 0x1b, 0xc3, 0xba, 0x1f, 0xe8, 0x88, 0xeb,
0x2e, 0x2d, 0xc2, 0x88, 0xa7, 0xca, 0xc3, 0x0a, 0x0f, 0x75, 0x35, 0x08, 0x5c, 0x58, 0x80, 0x24, 0xb6, 0xd3, 0x5c, 0x97, 0x66, 0x61, 0xc4, 0xa7, 0xca, 0x87, 0x15, 0x1e, 0x9a, 0x6a, 0x10, 0xb8,
0xb8, 0xf7, 0x34, 0xea, 0xde, 0xad, 0x2c, 0x4b, 0x18, 0x76, 0xd1, 0x82, 0x8b, 0x3b, 0x44, 0x74, 0xb4, 0x00, 0x49, 0xf0, 0xde, 0xf3, 0xa8, 0xf7, 0xee, 0x64, 0x09, 0x61, 0xd8, 0x8b, 0x16, 0x5c,
0x5d, 0x62, 0x11, 0x47, 0x50, 0x6c, 0x7f, 0x78, 0xc2, 0x36, 0xa0, 0x38, 0xe3, 0xb2, 0x3e, 0x4e, 0xde, 0x21, 0xa2, 0xcb, 0x88, 0x45, 0x5c, 0x61, 0x63, 0xe7, 0xe3, 0x13, 0xb6, 0x01, 0xc5, 0x19,
0x54, 0x30, 0x25, 0x43, 0xb7, 0x5b, 0x7f, 0xe6, 0xe0, 0x52, 0x4c, 0xe6, 0x63, 0x16, 0x6a, 0x89, 0x97, 0xfd, 0x71, 0xa2, 0x8c, 0x29, 0x19, 0x7a, 0xdc, 0xfa, 0x3d, 0x07, 0x57, 0x62, 0x32, 0x9f,
0x94, 0xfc, 0x36, 0xc5, 0x9c, 0x1f, 0x31, 0x57, 0x1d, 0xb4, 0x25, 0x43, 0xb7, 0xb7, 0xfe, 0xbe, 0x12, 0xa8, 0x25, 0x52, 0xf2, 0xdb, 0x14, 0x73, 0x7e, 0x4c, 0x99, 0x2a, 0xb4, 0x25, 0x43, 0x8f,
0x0e, 0x25, 0x83, 0x31, 0xd1, 0x95, 0x96, 0x20, 0x1b, 0x90, 0x8c, 0x89, 0x4d, 0xa6, 0xcc, 0x21, 0xb7, 0xfe, 0xbc, 0x09, 0x25, 0x83, 0x52, 0xd1, 0x95, 0x2e, 0x41, 0x0e, 0x20, 0x69, 0x13, 0x9d,
0x8e, 0x3a, 0x58, 0x39, 0xea, 0x44, 0x03, 0xf0, 0x1b, 0x8b, 0x40, 0xdf, 0xa8, 0xc6, 0xad, 0x44, 0x4c, 0xa9, 0x4b, 0x5c, 0x55, 0x58, 0x39, 0xea, 0x44, 0x0d, 0xf0, 0x07, 0x8b, 0x40, 0xdf, 0x51,
0x7c, 0x0c, 0xdc, 0x3a, 0x87, 0x26, 0x9e, 0x9a, 0xac, 0xd5, 0xaf, 0xa8, 0x79, 0xd0, 0xdd, 0xc7, 0x8d, 0x3b, 0x89, 0xf8, 0x18, 0xb8, 0x75, 0x01, 0x4d, 0x3c, 0x35, 0xd9, 0xab, 0xdf, 0xd8, 0xe6,
0x8e, 0x43, 0x6c, 0xf4, 0x30, 0x3a, 0x5a, 0xdf, 0x30, 0x16, 0xa1, 0x81, 0xde, 0xcd, 0x44, 0xbd, 0x61, 0xf7, 0x00, 0xbb, 0x2e, 0x71, 0xd0, 0xe3, 0xe8, 0x6a, 0x7d, 0xc3, 0x58, 0x84, 0x06, 0x7a,
0x81, 0x70, 0xa9, 0x33, 0x0e, 0x5c, 0x6d, 0x9d, 0x43, 0xef, 0xbd, 0x75, 0x95, 0xea, 0x94, 0x0b, 0xb7, 0x13, 0xf5, 0x06, 0x82, 0xd9, 0xee, 0x38, 0xf0, 0x6a, 0xeb, 0x02, 0xfa, 0xe0, 0xc5, 0x55,
0x6a, 0xf2, 0x40, 0x70, 0x2b, 0x5d, 0x70, 0x01, 0x7c, 0x46, 0xc9, 0x21, 0x6c, 0x74, 0x5d, 0x82, 0xaa, 0xdb, 0x5c, 0xd8, 0x26, 0x0f, 0x04, 0xb7, 0xd2, 0x05, 0x17, 0xc0, 0xe7, 0x94, 0x1c, 0xc2,
0x05, 0xe9, 0xea, 0x84, 0x41, 0xf7, 0x93, 0xdd, 0x89, 0xc1, 0x02, 0xa1, 0x65, 0x8b, 0xdf, 0x3a, 0x46, 0x97, 0x11, 0x2c, 0x48, 0x57, 0x27, 0x0c, 0x7a, 0x98, 0xec, 0x9d, 0x18, 0x2c, 0x10, 0x5a,
0x87, 0x7e, 0x81, 0x6a, 0xcf, 0x65, 0xd3, 0x10, 0xfd, 0xdd, 0x44, 0xfa, 0x28, 0x28, 0x23, 0xf9, 0x16, 0xfc, 0xd6, 0x05, 0xf4, 0x13, 0x54, 0x7b, 0x8c, 0x4e, 0x43, 0xf4, 0xf7, 0x13, 0xe9, 0xa3,
0x10, 0xd6, 0x5f, 0x60, 0x1e, 0xe2, 0xbe, 0x93, 0xc8, 0x1d, 0xc1, 0x04, 0xd4, 0x37, 0x12, 0xa1, 0xa0, 0x8c, 0xe4, 0x43, 0x58, 0x7f, 0x85, 0x79, 0x88, 0xfb, 0x5e, 0x22, 0x77, 0x04, 0x13, 0x50,
0xdb, 0x8c, 0xd9, 0x21, 0x7b, 0x8e, 0x00, 0x05, 0x87, 0x41, 0x48, 0x25, 0x79, 0xbb, 0x2d, 0x02, 0xdf, 0x4a, 0x84, 0x6e, 0x53, 0xea, 0x84, 0xdc, 0x73, 0x0c, 0x28, 0x28, 0x06, 0x21, 0x95, 0xe4,
0x03, 0xa9, 0xcd, 0xcc, 0x78, 0x2d, 0xfc, 0x1a, 0xca, 0xca, 0xf0, 0x67, 0x36, 0xc5, 0x1c, 0xdd, 0xe3, 0xb6, 0x08, 0x0c, 0xa4, 0x36, 0x33, 0xe3, 0xb5, 0xf0, 0x5b, 0x28, 0x2b, 0x87, 0xbf, 0x70,
0x5e, 0xb2, 0x24, 0x1e, 0x22, 0xa3, 0x61, 0x3f, 0x40, 0x49, 0x1a, 0xad, 0x48, 0xbf, 0x4c, 0x5d, 0x6c, 0xcc, 0xd1, 0xdd, 0x25, 0x21, 0xf1, 0x10, 0x19, 0x1d, 0xf6, 0x1d, 0x94, 0xa4, 0xa3, 0x15,
0x88, 0xb3, 0x50, 0x0e, 0x00, 0x9e, 0xd9, 0x82, 0xb8, 0x8a, 0xf3, 0xab, 0x44, 0xce, 0x39, 0x20, 0xe9, 0xff, 0x53, 0x03, 0x71, 0x1e, 0xca, 0x01, 0xc0, 0x0b, 0x47, 0x10, 0xa6, 0x38, 0xbf, 0x48,
0x23, 0xa9, 0x03, 0xb5, 0xc1, 0xbe, 0xbc, 0xdc, 0x04, 0xd6, 0x70, 0x74, 0x2f, 0x79, 0x43, 0x47, 0xe4, 0x9c, 0x03, 0x32, 0x92, 0xba, 0x50, 0x1b, 0x1c, 0xc8, 0xcb, 0x4d, 0xe0, 0x1a, 0x8e, 0x1e,
0x51, 0x01, 0xfd, 0xfd, 0x6c, 0x60, 0x6d, 0xf7, 0x5b, 0xa8, 0x29, 0x33, 0xf7, 0x82, 0x0b, 0x43, 0x24, 0x1f, 0xe8, 0x28, 0x2a, 0xa0, 0x7f, 0x98, 0x0d, 0xac, 0xdd, 0xbd, 0x2f, 0x6f, 0xae, 0x82,
0x8a, 0x5e, 0x0c, 0x95, 0x71, 0x3a, 0x3f, 0xc1, 0xba, 0xb4, 0x75, 0x4e, 0x7e, 0x27, 0xd5, 0xfa, 0xb0, 0x50, 0x90, 0x1f, 0xa4, 0xef, 0xe4, 0xdc, 0xe7, 0x74, 0x1f, 0x6a, 0x2a, 0x56, 0x7b, 0xc1,
0xb3, 0x52, 0xbf, 0x85, 0xca, 0x0b, 0xcc, 0xe7, 0xcc, 0xed, 0xb4, 0x0c, 0x58, 0x20, 0xce, 0x94, 0x7d, 0x24, 0x85, 0x3e, 0x86, 0xca, 0x48, 0xff, 0x03, 0xac, 0xcb, 0xa8, 0xcd, 0xc9, 0xef, 0xa5,
0x00, 0x07, 0x50, 0x95, 0xae, 0xe9, 0xc1, 0x3c, 0x25, 0x7d, 0xa3, 0xa0, 0x40, 0xe2, 0x5e, 0x26, 0x46, 0xf6, 0xbc, 0xd4, 0xfb, 0x50, 0x79, 0x85, 0xf9, 0x9c, 0xb9, 0x9d, 0x96, 0x60, 0x0b, 0xc4,
0xac, 0x16, 0x23, 0x50, 0x91, 0xdf, 0x82, 0xb2, 0x9b, 0x32, 0x97, 0x30, 0x24, 0x10, 0xba, 0x93, 0x99, 0xf2, 0xeb, 0x10, 0xaa, 0x32, 0x28, 0x7a, 0x31, 0x4f, 0xa9, 0x0e, 0x51, 0x50, 0x20, 0xf1,
0x01, 0x19, 0x3a, 0x66, 0xab, 0xd1, 0x37, 0x18, 0x7a, 0x90, 0x56, 0x81, 0x13, 0x5f, 0x83, 0x8d, 0x20, 0x13, 0x56, 0x8b, 0x11, 0xa8, 0xc8, 0x6f, 0x41, 0x57, 0x4f, 0xd9, 0x4b, 0x18, 0x12, 0x08,
0x4e, 0x56, 0xb8, 0x96, 0xfc, 0x15, 0xd6, 0xfc, 0x97, 0x51, 0x3c, 0x43, 0x62, 0x83, 0xf5, 0xa3, 0xdd, 0xcb, 0x80, 0x0c, 0x55, 0xf1, 0x6a, 0xf4, 0x89, 0x87, 0x1e, 0xa5, 0x35, 0xf8, 0xc4, 0xc7,
0xac, 0x71, 0xfb, 0x54, 0x9c, 0x66, 0xc7, 0x70, 0xe9, 0xf5, 0xd4, 0x92, 0xa7, 0xb3, 0xaa, 0x01, 0x66, 0xa3, 0x93, 0x15, 0xae, 0x25, 0x7f, 0x86, 0x35, 0xff, 0xe1, 0x15, 0x4f, 0xc0, 0xd8, 0x62,
0x41, 0x15, 0x8a, 0x6f, 0x33, 0x5d, 0x38, 0x62, 0xb8, 0x5d, 0x3e, 0x3e, 0x6d, 0x9b, 0xb9, 0xf0, 0xfd, 0xe6, 0x6b, 0xdc, 0x3d, 0x13, 0xa7, 0xd9, 0x31, 0x5c, 0x79, 0x3b, 0xb5, 0x64, 0xf1, 0x57,
0x79, 0xdf, 0x39, 0xc4, 0x36, 0xb5, 0x22, 0x45, 0x60, 0x97, 0x08, 0xdc, 0xc5, 0xe6, 0x3e, 0x89, 0x2d, 0x26, 0x68, 0x72, 0xf1, 0x63, 0xa6, 0xfb, 0x52, 0x0c, 0xb7, 0xcb, 0xc7, 0x67, 0x1d, 0x33,
0xd7, 0x28, 0xf5, 0xcc, 0x8e, 0x0e, 0xd1, 0xe0, 0x8c, 0x5b, 0xfb, 0x77, 0x40, 0x2a, 0x63, 0x9d, 0x06, 0xff, 0xed, 0xbb, 0x47, 0xd8, 0xb1, 0xad, 0x48, 0x8f, 0xd9, 0x25, 0x02, 0x77, 0xb1, 0x79,
0x77, 0x74, 0x3c, 0x73, 0xb1, 0xda, 0x7f, 0x69, 0xd5, 0x77, 0x11, 0x1a, 0xc8, 0x7c, 0x7d, 0x86, 0x40, 0xe2, 0x2d, 0x50, 0xbd, 0xe2, 0xa3, 0x4b, 0x34, 0x38, 0xe3, 0xd1, 0xfe, 0x15, 0x90, 0x2a,
0x11, 0xa1, 0xc2, 0x08, 0x3b, 0x44, 0xec, 0x12, 0xe1, 0x52, 0x33, 0xed, 0x58, 0x9b, 0x03, 0x52, 0x08, 0xee, 0x7b, 0x7b, 0x3c, 0x63, 0x58, 0x9d, 0xbf, 0xb4, 0xe6, 0xbe, 0x08, 0x0d, 0x64, 0xbe,
0x16, 0x2d, 0x01, 0xa7, 0x05, 0x06, 0xb0, 0xaa, 0x1e, 0x87, 0xa8, 0x95, 0x38, 0x28, 0x78, 0xda, 0x3c, 0xc7, 0x8a, 0x50, 0xdf, 0x85, 0x1d, 0x22, 0x76, 0x89, 0x60, 0xb6, 0x99, 0x56, 0x35, 0xe7,
0x2e, 0x2b, 0xe7, 0xfa, 0xf9, 0x1b, 0x4a, 0xd7, 0x1d, 0x22, 0x42, 0x8f, 0xce, 0x94, 0x74, 0x8d, 0x80, 0x94, 0xa0, 0x25, 0xe0, 0xb4, 0xc0, 0x00, 0x56, 0xd5, 0xdb, 0x13, 0xb5, 0x12, 0x17, 0x05,
0x82, 0x96, 0xa7, 0x6b, 0x1c, 0xab, 0xc5, 0x1c, 0xa8, 0x7d, 0x4f, 0xb9, 0xff, 0xf1, 0x15, 0xe6, 0x2f, 0xe7, 0x65, 0xb7, 0x05, 0xfd, 0xba, 0x0e, 0xa5, 0xeb, 0x0e, 0x11, 0xa1, 0x37, 0x6d, 0x4a,
0x07, 0x69, 0x87, 0x74, 0x0c, 0xb5, 0xfc, 0x90, 0x5e, 0x00, 0x87, 0x1c, 0xab, 0x18, 0x44, 0x7e, 0xba, 0x46, 0x41, 0xcb, 0xd3, 0x35, 0x8e, 0xd5, 0x62, 0x2e, 0xd4, 0xbe, 0xb5, 0xb9, 0xff, 0xf1,
0xf0, 0x7d, 0x4b, 0xbd, 0x37, 0x87, 0xff, 0x15, 0x38, 0x6d, 0x93, 0xbd, 0xd1, 0x17, 0x20, 0x7d, 0x0d, 0xe6, 0x87, 0x69, 0x3d, 0x20, 0x86, 0x5a, 0xde, 0x03, 0x16, 0xc0, 0x21, 0x8f, 0x55, 0x0c,
0xcf, 0x8d, 0x17, 0xc6, 0x79, 0xda, 0x68, 0x88, 0xbc, 0x92, 0x67, 0x60, 0xf6, 0xb3, 0xf2, 0x53, 0x22, 0x3f, 0xf8, 0x7e, 0x4b, 0xbd, 0x96, 0x87, 0xff, 0x74, 0x38, 0xeb, 0x90, 0xbd, 0xd3, 0xf7,
0x33, 0x0f, 0x61, 0xa3, 0x47, 0x6c, 0x12, 0x61, 0xbe, 0x9f, 0x72, 0xc7, 0x88, 0xc2, 0x32, 0x66, 0x2b, 0x7d, 0x8d, 0x8e, 0xf7, 0xdd, 0x79, 0xda, 0x68, 0x88, 0xbc, 0xf1, 0x67, 0x60, 0xf6, 0xb3,
0xde, 0x3e, 0xac, 0xcb, 0x65, 0x90, 0xe3, 0x5e, 0x73, 0xe2, 0xf2, 0x94, 0x7a, 0x15, 0xc1, 0x04, 0xf2, 0x73, 0x33, 0x0f, 0x61, 0xa3, 0x47, 0x1c, 0x12, 0x61, 0x7e, 0x98, 0x72, 0x85, 0x89, 0xc2,
0xd4, 0x77, 0xb3, 0x40, 0x43, 0x7b, 0x68, 0x3d, 0xf2, 0xc6, 0x88, 0xcf, 0x63, 0xbe, 0xa8, 0x49, 0x32, 0x66, 0xde, 0x01, 0xac, 0xcb, 0x30, 0xc8, 0x75, 0x6f, 0x39, 0x61, 0x3c, 0xa5, 0x5f, 0x45,
0x2f, 0x9e, 0xc6, 0x83, 0x8c, 0xe8, 0xd0, 0x1e, 0x02, 0xb5, 0xdc, 0x06, 0xb3, 0x49, 0x4a, 0x5a, 0x30, 0x01, 0xf5, 0xfd, 0x2c, 0xd0, 0xd0, 0x19, 0x5a, 0x8f, 0x3c, 0x61, 0xe2, 0xfb, 0x98, 0x07,
0xcf, 0x01, 0x19, 0xed, 0x7a, 0x09, 0x45, 0x59, 0xba, 0x3d, 0xca, 0x5b, 0xa9, 0x95, 0xfd, 0x0c, 0x35, 0xe9, 0x41, 0xd5, 0x78, 0x94, 0x11, 0x1d, 0x3a, 0x43, 0xa0, 0xc2, 0x6d, 0x50, 0x87, 0xa4,
0x84, 0x6f, 0xa1, 0xf6, 0x72, 0x4a, 0x5c, 0x2c, 0x88, 0xf4, 0xcb, 0xe3, 0x4d, 0xce, 0xac, 0x18, 0xa4, 0xf5, 0x1c, 0x90, 0xd1, 0x5d, 0xaf, 0xa1, 0x28, 0x5b, 0xb7, 0x47, 0x79, 0x27, 0xb5, 0xb3,
0x2a, 0xf3, 0xb5, 0x19, 0x06, 0x44, 0x9e, 0xe0, 0x4b, 0x4c, 0x98, 0x03, 0x96, 0x9f, 0x6d, 0x61, 0x9f, 0x83, 0x70, 0x1f, 0x6a, 0xaf, 0xa7, 0x84, 0x61, 0x41, 0xa4, 0xbf, 0x3c, 0xde, 0xe4, 0xcc,
0x5c, 0xf8, 0xf0, 0x54, 0xfd, 0x32, 0xb0, 0xa5, 0x02, 0x5e, 0xe4, 0x19, 0x04, 0x14, 0x2e, 0xfc, 0x8a, 0xa1, 0x32, 0xdf, 0xca, 0x61, 0x40, 0x64, 0x05, 0x5f, 0xe2, 0x84, 0x39, 0x60, 0x79, 0x6d,
0x6c, 0xf1, 0xa7, 0xbe, 0xe7, 0xd2, 0x43, 0x6a, 0x93, 0x31, 0x49, 0xc9, 0x80, 0x38, 0x2c, 0xa3, 0x0b, 0xe3, 0xc2, 0xc5, 0x53, 0xcd, 0x4b, 0xc3, 0x96, 0x0a, 0x78, 0x96, 0x67, 0x10, 0x50, 0xb8,
0x45, 0x23, 0x28, 0x2b, 0xe1, 0x1d, 0x17, 0x3b, 0x02, 0x2d, 0x0b, 0xcd, 0x43, 0x04, 0xb4, 0xed, 0xf0, 0xab, 0xc8, 0xdf, 0xfa, 0x1e, 0xb3, 0x8f, 0x6c, 0x87, 0x8c, 0x49, 0x4a, 0x06, 0xc4, 0x61,
0xd3, 0x81, 0x7a, 0x12, 0x26, 0x80, 0x4c, 0x8b, 0x3d, 0x66, 0x53, 0xf3, 0x24, 0x7e, 0xd9, 0xd1, 0x19, 0x5d, 0x34, 0x82, 0xb2, 0x12, 0xde, 0x61, 0xd8, 0x15, 0x68, 0x99, 0x69, 0x1e, 0x22, 0xa0,
0x47, 0xc3, 0x1c, 0x92, 0x72, 0xd9, 0x49, 0x44, 0x06, 0x22, 0xdb, 0x4f, 0x7e, 0x7e, 0x3c, 0xa6, 0x6d, 0x9f, 0x0d, 0xd4, 0x9b, 0x30, 0x01, 0x64, 0x5a, 0xec, 0x51, 0xc7, 0x36, 0x4f, 0xe3, 0x97,
0x62, 0x7f, 0x36, 0x92, 0x53, 0xdc, 0x54, 0x03, 0x1f, 0x50, 0xe6, 0xff, 0xda, 0x0c, 0x06, 0x6f, 0x1d, 0x5d, 0x1a, 0xe6, 0x90, 0x94, 0xcb, 0x4e, 0x22, 0x32, 0x10, 0xd9, 0x7e, 0xf6, 0xe3, 0xd3,
0x7a, 0x5c, 0x9b, 0x3a, 0x81, 0xa6, 0xa3, 0xd1, 0xaa, 0xd7, 0xf5, 0xe8, 0xff, 0x00, 0x00, 0x00, 0xb1, 0x2d, 0x0e, 0x66, 0x23, 0xb9, 0xc5, 0x4d, 0xb5, 0xf0, 0x91, 0x4d, 0xfd, 0x5f, 0x9b, 0xc1,
0xff, 0xff, 0xb6, 0xc3, 0xbe, 0xa7, 0x89, 0x17, 0x00, 0x00, 0xe2, 0x4d, 0x8f, 0x6b, 0x53, 0x27, 0xd0, 0x74, 0x34, 0x5a, 0xf5, 0xa6, 0x9e, 0xfc, 0x1b, 0x00,
0x00, 0xff, 0xff, 0x45, 0x45, 0x76, 0x4c, 0xe8, 0x17, 0x00, 0x00,
} }
// Reference imports to suppress errors if they are not otherwise used. // Reference imports to suppress errors if they are not otherwise used.
@ -822,6 +823,7 @@ type RootCoordClient interface {
// //
// @return StringListResponse, collection name list // @return StringListResponse, collection name list
ShowCollections(ctx context.Context, in *milvuspb.ShowCollectionsRequest, opts ...grpc.CallOption) (*milvuspb.ShowCollectionsResponse, error) ShowCollections(ctx context.Context, in *milvuspb.ShowCollectionsRequest, opts ...grpc.CallOption) (*milvuspb.ShowCollectionsResponse, error)
AlterCollection(ctx context.Context, in *milvuspb.AlterCollectionRequest, opts ...grpc.CallOption) (*commonpb.Status, error)
//* //*
// @brief This method is used to create partition // @brief This method is used to create partition
// //
@ -983,6 +985,15 @@ func (c *rootCoordClient) ShowCollections(ctx context.Context, in *milvuspb.Show
return out, nil return out, nil
} }
func (c *rootCoordClient) AlterCollection(ctx context.Context, in *milvuspb.AlterCollectionRequest, opts ...grpc.CallOption) (*commonpb.Status, error) {
out := new(commonpb.Status)
err := c.cc.Invoke(ctx, "/milvus.proto.rootcoord.RootCoord/AlterCollection", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *rootCoordClient) CreatePartition(ctx context.Context, in *milvuspb.CreatePartitionRequest, opts ...grpc.CallOption) (*commonpb.Status, error) { func (c *rootCoordClient) CreatePartition(ctx context.Context, in *milvuspb.CreatePartitionRequest, opts ...grpc.CallOption) (*commonpb.Status, error) {
out := new(commonpb.Status) out := new(commonpb.Status)
err := c.cc.Invoke(ctx, "/milvus.proto.rootcoord.RootCoord/CreatePartition", in, out, opts...) err := c.cc.Invoke(ctx, "/milvus.proto.rootcoord.RootCoord/CreatePartition", in, out, opts...)
@ -1276,6 +1287,7 @@ type RootCoordServer interface {
// //
// @return StringListResponse, collection name list // @return StringListResponse, collection name list
ShowCollections(context.Context, *milvuspb.ShowCollectionsRequest) (*milvuspb.ShowCollectionsResponse, error) ShowCollections(context.Context, *milvuspb.ShowCollectionsRequest) (*milvuspb.ShowCollectionsResponse, error)
AlterCollection(context.Context, *milvuspb.AlterCollectionRequest) (*commonpb.Status, error)
//* //*
// @brief This method is used to create partition // @brief This method is used to create partition
// //
@ -1367,6 +1379,9 @@ func (*UnimplementedRootCoordServer) AlterAlias(ctx context.Context, req *milvus
func (*UnimplementedRootCoordServer) ShowCollections(ctx context.Context, req *milvuspb.ShowCollectionsRequest) (*milvuspb.ShowCollectionsResponse, error) { func (*UnimplementedRootCoordServer) ShowCollections(ctx context.Context, req *milvuspb.ShowCollectionsRequest) (*milvuspb.ShowCollectionsResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ShowCollections not implemented") return nil, status.Errorf(codes.Unimplemented, "method ShowCollections not implemented")
} }
func (*UnimplementedRootCoordServer) AlterCollection(ctx context.Context, req *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
return nil, status.Errorf(codes.Unimplemented, "method AlterCollection not implemented")
}
func (*UnimplementedRootCoordServer) CreatePartition(ctx context.Context, req *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) { func (*UnimplementedRootCoordServer) CreatePartition(ctx context.Context, req *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) {
return nil, status.Errorf(codes.Unimplemented, "method CreatePartition not implemented") return nil, status.Errorf(codes.Unimplemented, "method CreatePartition not implemented")
} }
@ -1654,6 +1669,24 @@ func _RootCoord_ShowCollections_Handler(srv interface{}, ctx context.Context, de
return interceptor(ctx, in, info, handler) return interceptor(ctx, in, info, handler)
} }
func _RootCoord_AlterCollection_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(milvuspb.AlterCollectionRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(RootCoordServer).AlterCollection(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/milvus.proto.rootcoord.RootCoord/AlterCollection",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(RootCoordServer).AlterCollection(ctx, req.(*milvuspb.AlterCollectionRequest))
}
return interceptor(ctx, in, info, handler)
}
func _RootCoord_CreatePartition_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { func _RootCoord_CreatePartition_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(milvuspb.CreatePartitionRequest) in := new(milvuspb.CreatePartitionRequest)
if err := dec(in); err != nil { if err := dec(in); err != nil {
@ -2206,6 +2239,10 @@ var _RootCoord_serviceDesc = grpc.ServiceDesc{
MethodName: "ShowCollections", MethodName: "ShowCollections",
Handler: _RootCoord_ShowCollections_Handler, Handler: _RootCoord_ShowCollections_Handler,
}, },
{
MethodName: "AlterCollection",
Handler: _RootCoord_AlterCollection_Handler,
},
{ {
MethodName: "CreatePartition", MethodName: "CreatePartition",
Handler: _RootCoord_CreatePartition_Handler, Handler: _RootCoord_CreatePartition_Handler,

View File

@ -126,6 +126,10 @@ func (coord *DataCoordMock) MarkSegmentsDropped(ctx context.Context, req *datapb
panic("implement me") panic("implement me")
} }
func (coord *DataCoordMock) BroadCastAlteredCollection(ctx context.Context, req *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
panic("implement me")
}
func (coord *DataCoordMock) AssignSegmentID(ctx context.Context, req *datapb.AssignSegmentIDRequest) (*datapb.AssignSegmentIDResponse, error) { func (coord *DataCoordMock) AssignSegmentID(ctx context.Context, req *datapb.AssignSegmentIDRequest) (*datapb.AssignSegmentIDResponse, error) {
panic("implement me") panic("implement me")
} }

View File

@ -992,6 +992,94 @@ func (node *Proxy) ShowCollections(ctx context.Context, request *milvuspb.ShowCo
return sct.result, nil return sct.result, nil
} }
func (node *Proxy) AlterCollection(ctx context.Context, request *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
if !node.checkHealthy() {
return unhealthyStatus(), nil
}
sp, ctx := trace.StartSpanFromContextWithOperationName(ctx, "Proxy-AlterCollection")
defer sp.Finish()
traceID, _, _ := trace.InfoFromSpan(sp)
method := "AlterCollection"
tr := timerecord.NewTimeRecorder(method)
metrics.ProxyDDLFunctionCall.WithLabelValues(strconv.FormatInt(Params.ProxyCfg.GetNodeID(), 10), method, metrics.TotalLabel).Inc()
act := &alterCollectionTask{
ctx: ctx,
Condition: NewTaskCondition(ctx),
AlterCollectionRequest: request,
rootCoord: node.rootCoord,
}
log.Debug(
rpcReceived(method),
zap.String("traceID", traceID),
zap.String("role", typeutil.ProxyRole),
zap.String("db", request.DbName),
zap.String("collection", request.CollectionName))
if err := node.sched.ddQueue.Enqueue(act); err != nil {
log.Warn(
rpcFailedToEnqueue(method),
zap.Error(err),
zap.String("traceID", traceID),
zap.String("role", typeutil.ProxyRole),
zap.String("db", request.DbName),
zap.String("collection", request.CollectionName))
metrics.ProxyDDLFunctionCall.WithLabelValues(strconv.FormatInt(Params.ProxyCfg.GetNodeID(), 10), method, metrics.AbandonLabel).Inc()
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
Reason: err.Error(),
}, nil
}
log.Debug(
rpcEnqueued(method),
zap.String("traceID", traceID),
zap.String("role", typeutil.ProxyRole),
zap.Int64("MsgID", act.ID()),
zap.Uint64("BeginTs", act.BeginTs()),
zap.Uint64("EndTs", act.EndTs()),
zap.Uint64("timestamp", request.Base.Timestamp),
zap.String("db", request.DbName),
zap.String("collection", request.CollectionName))
if err := act.WaitToFinish(); err != nil {
log.Warn(
rpcFailedToWaitToFinish(method),
zap.Error(err),
zap.String("traceID", traceID),
zap.String("role", typeutil.ProxyRole),
zap.Int64("MsgID", act.ID()),
zap.Uint64("BeginTs", act.BeginTs()),
zap.Uint64("EndTs", act.EndTs()),
zap.String("db", request.DbName),
zap.String("collection", request.CollectionName))
metrics.ProxyDDLFunctionCall.WithLabelValues(strconv.FormatInt(Params.ProxyCfg.GetNodeID(), 10), method, metrics.FailLabel).Inc()
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
Reason: err.Error(),
}, nil
}
log.Debug(
rpcDone(method),
zap.String("traceID", traceID),
zap.String("role", typeutil.ProxyRole),
zap.Int64("MsgID", act.ID()),
zap.Uint64("BeginTs", act.BeginTs()),
zap.Uint64("EndTs", act.EndTs()),
zap.String("db", request.DbName),
zap.String("collection", request.CollectionName))
metrics.ProxyDDLFunctionCall.WithLabelValues(strconv.FormatInt(Params.ProxyCfg.GetNodeID(), 10), method, metrics.SuccessLabel).Inc()
metrics.ProxyDDLReqLatency.WithLabelValues(strconv.FormatInt(Params.ProxyCfg.GetNodeID(), 10), method).Observe(float64(tr.ElapseSpan().Milliseconds()))
return act.result, nil
}
// CreatePartition create a partition in specific collection. // CreatePartition create a partition in specific collection.
func (node *Proxy) CreatePartition(ctx context.Context, request *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) { func (node *Proxy) CreatePartition(ctx context.Context, request *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) {
if !node.checkHealthy() { if !node.checkHealthy() {

View File

@ -920,6 +920,24 @@ func TestProxy(t *testing.T) {
assert.Equal(t, 1, len(resp.CollectionNames), resp.CollectionNames) assert.Equal(t, 1, len(resp.CollectionNames), resp.CollectionNames)
}) })
wg.Add(1)
t.Run("alter collection", func(t *testing.T) {
defer wg.Done()
resp, err := proxy.AlterCollection(ctx, &milvuspb.AlterCollectionRequest{
Base: nil,
DbName: dbName,
CollectionName: "cn",
Properties: []*commonpb.KeyValuePair{
{
Key: common.CollectionTTLConfigKey,
Value: "3600",
},
},
})
assert.NoError(t, err)
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.ErrorCode)
})
wg.Add(1) wg.Add(1)
t.Run("create partition", func(t *testing.T) { t.Run("create partition", func(t *testing.T) {
defer wg.Done() defer wg.Done()
@ -2406,6 +2424,18 @@ func TestProxy(t *testing.T) {
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.Status.ErrorCode) assert.NotEqual(t, commonpb.ErrorCode_Success, resp.Status.ErrorCode)
}) })
wg.Add(1)
t.Run("alter collection fail, unhealthy", func(t *testing.T) {
defer wg.Done()
resp, err := proxy.AlterCollection(ctx, &milvuspb.AlterCollectionRequest{
Base: nil,
DbName: dbName,
CollectionName: "cn",
})
assert.NoError(t, err)
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.ErrorCode)
})
wg.Add(1) wg.Add(1)
t.Run("CreatePartition fail, unhealthy", func(t *testing.T) { t.Run("CreatePartition fail, unhealthy", func(t *testing.T) {
defer wg.Done() defer wg.Done()
@ -2737,6 +2767,18 @@ func TestProxy(t *testing.T) {
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.Status.ErrorCode) assert.NotEqual(t, commonpb.ErrorCode_Success, resp.Status.ErrorCode)
}) })
wg.Add(1)
t.Run("alter collection fail, dd queue full", func(t *testing.T) {
defer wg.Done()
resp, err := proxy.AlterCollection(ctx, &milvuspb.AlterCollectionRequest{
Base: nil,
DbName: dbName,
CollectionName: "cn",
})
assert.NoError(t, err)
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.ErrorCode)
})
wg.Add(1) wg.Add(1)
t.Run("CreatePartition fail, dd queue full", func(t *testing.T) { t.Run("CreatePartition fail, dd queue full", func(t *testing.T) {
defer wg.Done() defer wg.Done()
@ -2980,6 +3022,18 @@ func TestProxy(t *testing.T) {
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.Status.ErrorCode) assert.NotEqual(t, commonpb.ErrorCode_Success, resp.Status.ErrorCode)
}) })
wg.Add(1)
t.Run("alter collection fail, timeout", func(t *testing.T) {
defer wg.Done()
resp, err := proxy.AlterCollection(shortCtx, &milvuspb.AlterCollectionRequest{
Base: nil,
DbName: dbName,
CollectionName: "cn",
})
assert.NoError(t, err)
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.ErrorCode)
})
wg.Add(1) wg.Add(1)
t.Run("CreatePartition fail, timeout", func(t *testing.T) { t.Run("CreatePartition fail, timeout", func(t *testing.T) {
defer wg.Done() defer wg.Done()

View File

@ -1120,6 +1120,10 @@ func (coord *RootCoordMock) ListPolicy(ctx context.Context, in *internalpb.ListP
return &internalpb.ListPolicyResponse{}, nil return &internalpb.ListPolicyResponse{}, nil
} }
func (coord *RootCoordMock) AlterCollection(ctx context.Context, request *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
return &commonpb.Status{}, nil
}
type DescribeCollectionFunc func(ctx context.Context, request *milvuspb.DescribeCollectionRequest) (*milvuspb.DescribeCollectionResponse, error) type DescribeCollectionFunc func(ctx context.Context, request *milvuspb.DescribeCollectionRequest) (*milvuspb.DescribeCollectionResponse, error)
type ShowPartitionsFunc func(ctx context.Context, request *milvuspb.ShowPartitionsRequest) (*milvuspb.ShowPartitionsResponse, error) type ShowPartitionsFunc func(ctx context.Context, request *milvuspb.ShowPartitionsRequest) (*milvuspb.ShowPartitionsResponse, error)
type DescribeIndexFunc func(ctx context.Context, request *milvuspb.DescribeIndexRequest) (*milvuspb.DescribeIndexResponse, error) type DescribeIndexFunc func(ctx context.Context, request *milvuspb.DescribeIndexRequest) (*milvuspb.DescribeIndexResponse, error)

View File

@ -73,6 +73,7 @@ const (
CreateAliasTaskName = "CreateAliasTask" CreateAliasTaskName = "CreateAliasTask"
DropAliasTaskName = "DropAliasTask" DropAliasTaskName = "DropAliasTask"
AlterAliasTaskName = "AlterAliasTask" AlterAliasTaskName = "AlterAliasTask"
AlterCollectionTaskName = "AlterCollectionTask"
// minFloat32 minimum float. // minFloat32 minimum float.
minFloat32 = -1 * float32(math.MaxFloat32) minFloat32 = -1 * float32(math.MaxFloat32)
@ -479,6 +480,7 @@ func (dct *describeCollectionTask) Execute(ctx context.Context) error {
dct.result.ShardsNum = result.ShardsNum dct.result.ShardsNum = result.ShardsNum
dct.result.ConsistencyLevel = result.ConsistencyLevel dct.result.ConsistencyLevel = result.ConsistencyLevel
dct.result.Aliases = result.Aliases dct.result.Aliases = result.Aliases
dct.result.Properties = result.Properties
for _, field := range result.Schema.Fields { for _, field := range result.Schema.Fields {
if field.FieldID >= common.StartOfUserFieldID { if field.FieldID >= common.StartOfUserFieldID {
dct.result.Schema.Fields = append(dct.result.Schema.Fields, &schemapb.FieldSchema{ dct.result.Schema.Fields = append(dct.result.Schema.Fields, &schemapb.FieldSchema{
@ -663,6 +665,68 @@ func (sct *showCollectionsTask) PostExecute(ctx context.Context) error {
return nil return nil
} }
type alterCollectionTask struct {
Condition
*milvuspb.AlterCollectionRequest
ctx context.Context
rootCoord types.RootCoord
result *commonpb.Status
}
func (act *alterCollectionTask) TraceCtx() context.Context {
return act.ctx
}
func (act *alterCollectionTask) ID() UniqueID {
return act.Base.MsgID
}
func (act *alterCollectionTask) SetID(uid UniqueID) {
act.Base.MsgID = uid
}
func (act *alterCollectionTask) Name() string {
return AlterCollectionTaskName
}
func (act *alterCollectionTask) Type() commonpb.MsgType {
return act.Base.MsgType
}
func (act *alterCollectionTask) BeginTs() Timestamp {
return act.Base.Timestamp
}
func (act *alterCollectionTask) EndTs() Timestamp {
return act.Base.Timestamp
}
func (act *alterCollectionTask) SetTs(ts Timestamp) {
act.Base.Timestamp = ts
}
func (act *alterCollectionTask) OnEnqueue() error {
act.Base = &commonpb.MsgBase{}
return nil
}
func (act *alterCollectionTask) PreExecute(ctx context.Context) error {
act.Base.MsgType = commonpb.MsgType_AlterCollection
act.Base.SourceID = Params.ProxyCfg.GetNodeID()
return nil
}
func (act *alterCollectionTask) Execute(ctx context.Context) error {
var err error
act.result, err = act.rootCoord.AlterCollection(ctx, act.AlterCollectionRequest)
return err
}
func (act *alterCollectionTask) PostExecute(ctx context.Context) error {
return nil
}
type createPartitionTask struct { type createPartitionTask struct {
Condition Condition
*milvuspb.CreatePartitionRequest *milvuspb.CreatePartitionRequest

View File

@ -0,0 +1,66 @@
package rootcoord
import (
"context"
"errors"
"fmt"
"github.com/milvus-io/milvus/internal/log"
"go.uber.org/zap"
"github.com/milvus-io/milvus/api/milvuspb"
)
type alterCollectionTask struct {
baseTask
Req *milvuspb.AlterCollectionRequest
}
func (a *alterCollectionTask) Prepare(ctx context.Context) error {
if a.Req.GetCollectionName() == "" {
return fmt.Errorf("alter collection failed, collection name does not exists")
}
return nil
}
func (a *alterCollectionTask) Execute(ctx context.Context) error {
	// Currently only altering collection properties is supported.
if a.Req.GetProperties() == nil {
return errors.New("only support alter collection properties, but collection properties is empty")
}
oldColl, err := a.core.meta.GetCollectionByName(ctx, a.Req.GetCollectionName(), a.ts)
if err != nil {
log.Warn("get collection failed during changing collection state",
zap.String("collectionName", a.Req.GetCollectionName()), zap.Uint64("ts", a.ts))
return err
}
newColl := oldColl.Clone()
newColl.Properties = a.Req.GetProperties()
ts := a.GetTs()
redoTask := newBaseRedoTask(a.core.stepExecutor)
redoTask.AddSyncStep(&AlterCollectionStep{
baseStep: baseStep{core: a.core},
oldColl: oldColl,
newColl: newColl,
ts: ts,
})
redoTask.AddSyncStep(&expireCacheStep{
baseStep: baseStep{core: a.core},
collectionID: oldColl.CollectionID,
ts: ts,
})
a.Req.CollectionID = oldColl.CollectionID
redoTask.AddSyncStep(&BroadcastAlteredCollectionStep{
baseStep: baseStep{core: a.core},
req: a.Req,
core: a.core,
})
return redoTask.Execute(ctx)
}

View File

@ -0,0 +1,143 @@
package rootcoord
import (
"context"
"errors"
"testing"
"github.com/milvus-io/milvus/internal/metastore/model"
"github.com/milvus-io/milvus/internal/common"
"github.com/milvus-io/milvus/api/commonpb"
"github.com/stretchr/testify/assert"
"github.com/milvus-io/milvus/api/milvuspb"
)
func Test_alterCollectionTask_Prepare(t *testing.T) {
t.Run("invalid collectionID", func(t *testing.T) {
task := &alterCollectionTask{Req: &milvuspb.AlterCollectionRequest{Base: &commonpb.MsgBase{MsgType: commonpb.MsgType_AlterCollection}}}
err := task.Prepare(context.Background())
assert.Error(t, err)
})
t.Run("normal case", func(t *testing.T) {
task := &alterCollectionTask{
Req: &milvuspb.AlterCollectionRequest{
Base: &commonpb.MsgBase{MsgType: commonpb.MsgType_AlterCollection},
CollectionName: "cn",
},
}
err := task.Prepare(context.Background())
assert.NoError(t, err)
})
}
func Test_alterCollectionTask_Execute(t *testing.T) {
properties := []*commonpb.KeyValuePair{
{
Key: common.CollectionTTLConfigKey,
Value: "3600",
},
}
t.Run("properties is empty", func(t *testing.T) {
task := &alterCollectionTask{Req: &milvuspb.AlterCollectionRequest{Base: &commonpb.MsgBase{MsgType: commonpb.MsgType_AlterCollection}}}
err := task.Execute(context.Background())
assert.Error(t, err)
})
t.Run("failed to create alias", func(t *testing.T) {
core := newTestCore(withInvalidMeta())
task := &alterCollectionTask{
baseTask: baseTask{core: core},
Req: &milvuspb.AlterCollectionRequest{
Base: &commonpb.MsgBase{MsgType: commonpb.MsgType_AlterCollection},
CollectionName: "cn",
Properties: properties,
},
}
err := task.Execute(context.Background())
assert.Error(t, err)
})
t.Run("alter step failed", func(t *testing.T) {
meta := newMockMetaTable()
meta.GetCollectionByNameFunc = func(ctx context.Context, collectionName string, ts Timestamp) (*model.Collection, error) {
return &model.Collection{CollectionID: int64(1)}, nil
}
meta.AlterCollectionFunc = func(ctx context.Context, oldColl *model.Collection, newColl *model.Collection, ts Timestamp) error {
return errors.New("err")
}
core := newTestCore(withMeta(meta))
task := &alterCollectionTask{
baseTask: baseTask{core: core},
Req: &milvuspb.AlterCollectionRequest{
Base: &commonpb.MsgBase{MsgType: commonpb.MsgType_AlterCollection},
CollectionName: "cn",
Properties: properties,
},
}
err := task.Execute(context.Background())
assert.Error(t, err)
})
t.Run("broadcast step failed", func(t *testing.T) {
meta := newMockMetaTable()
meta.GetCollectionByNameFunc = func(ctx context.Context, collectionName string, ts Timestamp) (*model.Collection, error) {
return &model.Collection{CollectionID: int64(1)}, nil
}
meta.AlterCollectionFunc = func(ctx context.Context, oldColl *model.Collection, newColl *model.Collection, ts Timestamp) error {
return nil
}
broker := newMockBroker()
broker.BroadCastAlteredCollectionFunc = func(ctx context.Context, req *milvuspb.AlterCollectionRequest) error {
return errors.New("err")
}
core := newTestCore(withValidProxyManager(), withMeta(meta), withBroker(broker))
task := &alterCollectionTask{
baseTask: baseTask{core: core},
Req: &milvuspb.AlterCollectionRequest{
Base: &commonpb.MsgBase{MsgType: commonpb.MsgType_AlterCollection},
CollectionName: "cn",
Properties: properties,
},
}
err := task.Execute(context.Background())
assert.Error(t, err)
})
t.Run("alter successfully", func(t *testing.T) {
meta := newMockMetaTable()
meta.GetCollectionByNameFunc = func(ctx context.Context, collectionName string, ts Timestamp) (*model.Collection, error) {
return &model.Collection{CollectionID: int64(1)}, nil
}
meta.AlterCollectionFunc = func(ctx context.Context, oldColl *model.Collection, newColl *model.Collection, ts Timestamp) error {
return nil
}
broker := newMockBroker()
broker.BroadCastAlteredCollectionFunc = func(ctx context.Context, req *milvuspb.AlterCollectionRequest) error {
return nil
}
core := newTestCore(withValidProxyManager(), withMeta(meta), withBroker(broker))
task := &alterCollectionTask{
baseTask: baseTask{core: core},
Req: &milvuspb.AlterCollectionRequest{
Base: &commonpb.MsgBase{MsgType: commonpb.MsgType_AlterCollection},
CollectionName: "cn",
Properties: properties,
},
}
err := task.Execute(context.Background())
assert.NoError(t, err)
})
}

View File

@ -5,6 +5,8 @@ import (
"errors" "errors"
"fmt" "fmt"
"github.com/milvus-io/milvus/api/milvuspb"
"github.com/milvus-io/milvus/internal/proto/indexpb" "github.com/milvus-io/milvus/internal/proto/indexpb"
"github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/internal/proto/datapb"
@ -42,6 +44,8 @@ type Broker interface {
DropCollectionIndex(ctx context.Context, collID UniqueID, partIDs []UniqueID) error DropCollectionIndex(ctx context.Context, collID UniqueID, partIDs []UniqueID) error
GetSegmentIndexState(ctx context.Context, collID UniqueID, indexName string, segIDs []UniqueID) ([]*indexpb.SegmentIndexState, error) GetSegmentIndexState(ctx context.Context, collID UniqueID, indexName string, segIDs []UniqueID) ([]*indexpb.SegmentIndexState, error)
DescribeIndex(ctx context.Context, colID UniqueID) (*indexpb.DescribeIndexResponse, error) DescribeIndex(ctx context.Context, colID UniqueID) (*indexpb.DescribeIndexResponse, error)
BroadCastAlteredCollection(ctx context.Context, req *milvuspb.AlterCollectionRequest) error
} }
type ServerBroker struct { type ServerBroker struct {
@ -226,6 +230,18 @@ func (b *ServerBroker) GetSegmentIndexState(ctx context.Context, collID UniqueID
return resp.GetStates(), nil return resp.GetStates(), nil
} }
func (b *ServerBroker) BroadCastAlteredCollection(ctx context.Context, req *milvuspb.AlterCollectionRequest) error {
resp, err := b.s.dataCoord.BroadCastAlteredCollection(ctx, req)
if err != nil {
return err
}
if resp.ErrorCode != commonpb.ErrorCode_Success {
return errors.New(resp.Reason)
}
return nil
}
func (b *ServerBroker) DescribeIndex(ctx context.Context, colID UniqueID) (*indexpb.DescribeIndexResponse, error) { func (b *ServerBroker) DescribeIndex(ctx context.Context, colID UniqueID) (*indexpb.DescribeIndexResponse, error) {
return b.s.indexCoord.DescribeIndex(ctx, &indexpb.DescribeIndexRequest{ return b.s.indexCoord.DescribeIndex(ctx, &indexpb.DescribeIndexRequest{
CollectionID: colID, CollectionID: colID,

View File

@ -4,6 +4,8 @@ import (
"context" "context"
"testing" "testing"
"github.com/milvus-io/milvus/api/milvuspb"
"github.com/milvus-io/milvus/internal/proto/indexpb" "github.com/milvus-io/milvus/internal/proto/indexpb"
"github.com/milvus-io/milvus/api/commonpb" "github.com/milvus-io/milvus/api/commonpb"
@ -276,3 +278,29 @@ func TestServerBroker_GetSegmentIndexState(t *testing.T) {
assert.Equal(t, commonpb.IndexState_Finished, states[0].GetState()) assert.Equal(t, commonpb.IndexState_Finished, states[0].GetState())
}) })
} }
func TestServerBroker_BroadCastAlteredCollection(t *testing.T) {
t.Run("failed to execute", func(t *testing.T) {
c := newTestCore(withInvalidDataCoord())
b := newServerBroker(c)
ctx := context.Background()
err := b.BroadCastAlteredCollection(ctx, &milvuspb.AlterCollectionRequest{})
assert.Error(t, err)
})
t.Run("non success error code on execute", func(t *testing.T) {
c := newTestCore(withFailedDataCoord())
b := newServerBroker(c)
ctx := context.Background()
err := b.BroadCastAlteredCollection(ctx, &milvuspb.AlterCollectionRequest{})
assert.Error(t, err)
})
t.Run("success", func(t *testing.T) {
c := newTestCore(withValidDataCoord())
b := newServerBroker(c)
ctx := context.Background()
err := b.BroadCastAlteredCollection(ctx, &milvuspb.AlterCollectionRequest{})
assert.NoError(t, err)
})
}

View File

@ -242,6 +242,7 @@ func (t *createCollectionTask) Execute(ctx context.Context) error {
State: pb.PartitionState_PartitionCreated, State: pb.PartitionState_PartitionCreated,
}, },
}, },
Properties: t.Req.Properties,
} }
// We cannot check the idempotency inside meta table when adding collection, since we'll execute duplicate steps // We cannot check the idempotency inside meta table when adding collection, since we'll execute duplicate steps

View File

@ -85,6 +85,7 @@ type IMetaTable interface {
CreateAlias(ctx context.Context, alias string, collectionName string, ts Timestamp) error CreateAlias(ctx context.Context, alias string, collectionName string, ts Timestamp) error
DropAlias(ctx context.Context, alias string, ts Timestamp) error DropAlias(ctx context.Context, alias string, ts Timestamp) error
AlterAlias(ctx context.Context, alias string, collectionName string, ts Timestamp) error AlterAlias(ctx context.Context, alias string, collectionName string, ts Timestamp) error
AlterCollection(ctx context.Context, oldColl *model.Collection, newColl *model.Collection, ts Timestamp) error
// TODO: it'll be a big cost if we handle the time travel logic, since we should always list all aliases in catalog. // TODO: it'll be a big cost if we handle the time travel logic, since we should always list all aliases in catalog.
IsAlias(name string) bool IsAlias(name string) bool
@ -365,6 +366,19 @@ func (mt *MetaTable) ListCollectionPhysicalChannels() map[typeutil.UniqueID][]st
return chanMap return chanMap
} }
func (mt *MetaTable) AlterCollection(ctx context.Context, oldColl *model.Collection, newColl *model.Collection, ts Timestamp) error {
mt.ddLock.Lock()
defer mt.ddLock.Unlock()
ctx1 := contextutil.WithTenantID(ctx, Params.CommonCfg.ClusterName)
if err := mt.catalog.AlterCollection(ctx1, oldColl, newColl, metastore.MODIFY, ts); err != nil {
return err
}
mt.collID2Meta[oldColl.CollectionID] = newColl
log.Info("alter collection finished", zap.Int64("collectionID", oldColl.CollectionID), zap.Uint64("ts", ts))
return nil
}
// GetCollectionVirtualChannels returns virtual channels of a given collection. // GetCollectionVirtualChannels returns virtual channels of a given collection.
func (mt *MetaTable) GetCollectionVirtualChannels(colID int64) []string { func (mt *MetaTable) GetCollectionVirtualChannels(colID int64) []string {
mt.ddLock.RLock() mt.ddLock.RLock()

View File

@ -841,3 +841,45 @@ func TestMetaTable_GetCollectionByName(t *testing.T) {
assert.Equal(t, Params.CommonCfg.DefaultPartitionName, coll.Partitions[0].PartitionName) assert.Equal(t, Params.CommonCfg.DefaultPartitionName, coll.Partitions[0].PartitionName)
}) })
} }
func TestMetaTable_AlterCollection(t *testing.T) {
t.Run("alter metastore fail", func(t *testing.T) {
catalog := mocks.NewRootCoordCatalog(t)
catalog.On("AlterCollection",
mock.Anything, // context.Context
mock.Anything,
mock.Anything,
mock.Anything,
mock.Anything,
).Return(errors.New("error"))
meta := &MetaTable{
catalog: catalog,
collID2Meta: map[typeutil.UniqueID]*model.Collection{},
}
ctx := context.Background()
err := meta.AlterCollection(ctx, nil, nil, 0)
assert.Error(t, err)
})
t.Run("alter collection ok", func(t *testing.T) {
catalog := mocks.NewRootCoordCatalog(t)
catalog.On("AlterCollection",
mock.Anything,
mock.Anything,
mock.Anything,
mock.Anything,
mock.Anything,
).Return(nil)
meta := &MetaTable{
catalog: catalog,
collID2Meta: map[typeutil.UniqueID]*model.Collection{},
}
ctx := context.Background()
oldColl := &model.Collection{CollectionID: 1}
newColl := &model.Collection{CollectionID: 1}
err := meta.AlterCollection(ctx, oldColl, newColl, 0)
assert.NoError(t, err)
assert.Equal(t, meta.collID2Meta[1], newColl)
})
}

View File

@ -52,6 +52,7 @@ type mockMetaTable struct {
GetCollectionIDByNameFunc func(name string) (UniqueID, error) GetCollectionIDByNameFunc func(name string) (UniqueID, error)
GetPartitionByNameFunc func(collID UniqueID, partitionName string, ts Timestamp) (UniqueID, error) GetPartitionByNameFunc func(collID UniqueID, partitionName string, ts Timestamp) (UniqueID, error)
GetCollectionVirtualChannelsFunc func(colID int64) []string GetCollectionVirtualChannelsFunc func(colID int64) []string
AlterCollectionFunc func(ctx context.Context, oldColl *model.Collection, newColl *model.Collection, ts Timestamp) error
} }
func (m mockMetaTable) ListCollections(ctx context.Context, ts Timestamp) ([]*model.Collection, error) { func (m mockMetaTable) ListCollections(ctx context.Context, ts Timestamp) ([]*model.Collection, error) {
@ -110,6 +111,10 @@ func (m mockMetaTable) ListAliasesByID(collID UniqueID) []string {
return m.ListAliasesByIDFunc(collID) return m.ListAliasesByIDFunc(collID)
} }
func (m mockMetaTable) AlterCollection(ctx context.Context, oldColl *model.Collection, newColl *model.Collection, ts Timestamp) error {
return m.AlterCollectionFunc(ctx, oldColl, newColl, ts)
}
func (m mockMetaTable) GetCollectionIDByName(name string) (UniqueID, error) { func (m mockMetaTable) GetCollectionIDByName(name string) (UniqueID, error) {
return m.GetCollectionIDByNameFunc(name) return m.GetCollectionIDByNameFunc(name)
} }
@ -151,13 +156,14 @@ func (m mockIndexCoord) DropIndex(ctx context.Context, req *indexpb.DropIndexReq
type mockDataCoord struct { type mockDataCoord struct {
types.DataCoord types.DataCoord
GetComponentStatesFunc func(ctx context.Context) (*milvuspb.ComponentStates, error) GetComponentStatesFunc func(ctx context.Context) (*milvuspb.ComponentStates, error)
WatchChannelsFunc func(ctx context.Context, req *datapb.WatchChannelsRequest) (*datapb.WatchChannelsResponse, error) WatchChannelsFunc func(ctx context.Context, req *datapb.WatchChannelsRequest) (*datapb.WatchChannelsResponse, error)
AcquireSegmentLockFunc func(ctx context.Context, req *datapb.AcquireSegmentLockRequest) (*commonpb.Status, error) AcquireSegmentLockFunc func(ctx context.Context, req *datapb.AcquireSegmentLockRequest) (*commonpb.Status, error)
ReleaseSegmentLockFunc func(ctx context.Context, req *datapb.ReleaseSegmentLockRequest) (*commonpb.Status, error) ReleaseSegmentLockFunc func(ctx context.Context, req *datapb.ReleaseSegmentLockRequest) (*commonpb.Status, error)
FlushFunc func(ctx context.Context, req *datapb.FlushRequest) (*datapb.FlushResponse, error) FlushFunc func(ctx context.Context, req *datapb.FlushRequest) (*datapb.FlushResponse, error)
ImportFunc func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error) ImportFunc func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error)
UnsetIsImportingStateFunc func(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) UnsetIsImportingStateFunc func(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error)
broadCastAlteredCollectionFunc func(ctx context.Context, req *milvuspb.AlterCollectionRequest) (*commonpb.Status, error)
} }
func newMockDataCoord() *mockDataCoord { func newMockDataCoord() *mockDataCoord {
@ -192,6 +198,10 @@ func (m *mockDataCoord) UnsetIsImportingState(ctx context.Context, req *datapb.U
return m.UnsetIsImportingStateFunc(ctx, req) return m.UnsetIsImportingStateFunc(ctx, req)
} }
func (m *mockDataCoord) BroadCastAlteredCollection(ctx context.Context, req *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
return m.broadCastAlteredCollectionFunc(ctx, req)
}
type mockQueryCoord struct { type mockQueryCoord struct {
types.QueryCoord types.QueryCoord
GetSegmentInfoFunc func(ctx context.Context, req *querypb.GetSegmentInfoRequest) (*querypb.GetSegmentInfoResponse, error) GetSegmentInfoFunc func(ctx context.Context, req *querypb.GetSegmentInfoRequest) (*querypb.GetSegmentInfoResponse, error)
@ -600,6 +610,9 @@ func withInvalidDataCoord() Opt {
dc.UnsetIsImportingStateFunc = func(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) { dc.UnsetIsImportingStateFunc = func(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
return nil, errors.New("error mock UnsetIsImportingState") return nil, errors.New("error mock UnsetIsImportingState")
} }
dc.broadCastAlteredCollectionFunc = func(ctx context.Context, req *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
return nil, errors.New("error mock broadCastAlteredCollection")
}
return withDataCoord(dc) return withDataCoord(dc)
} }
@ -638,6 +651,9 @@ func withFailedDataCoord() Opt {
Reason: "mock UnsetIsImportingState error", Reason: "mock UnsetIsImportingState error",
}, nil }, nil
} }
dc.broadCastAlteredCollectionFunc = func(ctx context.Context, req *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
return failStatus(commonpb.ErrorCode_UnexpectedError, "mock broadcast altered collection error"), nil
}
return withDataCoord(dc) return withDataCoord(dc)
} }
@ -673,6 +689,9 @@ func withValidDataCoord() Opt {
dc.UnsetIsImportingStateFunc = func(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) { dc.UnsetIsImportingStateFunc = func(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
return succStatus(), nil return succStatus(), nil
} }
dc.broadCastAlteredCollectionFunc = func(ctx context.Context, req *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
return succStatus(), nil
}
return withDataCoord(dc) return withDataCoord(dc)
} }
@ -785,6 +804,8 @@ type mockBroker struct {
DropCollectionIndexFunc func(ctx context.Context, collID UniqueID, partIDs []UniqueID) error DropCollectionIndexFunc func(ctx context.Context, collID UniqueID, partIDs []UniqueID) error
DescribeIndexFunc func(ctx context.Context, colID UniqueID) (*indexpb.DescribeIndexResponse, error) DescribeIndexFunc func(ctx context.Context, colID UniqueID) (*indexpb.DescribeIndexResponse, error)
GetSegmentIndexStateFunc func(ctx context.Context, collID UniqueID, indexName string, segIDs []UniqueID) ([]*indexpb.SegmentIndexState, error) GetSegmentIndexStateFunc func(ctx context.Context, collID UniqueID, indexName string, segIDs []UniqueID) ([]*indexpb.SegmentIndexState, error)
BroadCastAlteredCollectionFunc func(ctx context.Context, req *milvuspb.AlterCollectionRequest) error
} }
func newMockBroker() *mockBroker { func newMockBroker() *mockBroker {
@ -815,6 +836,10 @@ func (b mockBroker) GetSegmentIndexState(ctx context.Context, collID UniqueID, i
return b.GetSegmentIndexStateFunc(ctx, collID, indexName, segIDs) return b.GetSegmentIndexStateFunc(ctx, collID, indexName, segIDs)
} }
func (b mockBroker) BroadCastAlteredCollection(ctx context.Context, req *milvuspb.AlterCollectionRequest) error {
return b.BroadCastAlteredCollectionFunc(ctx, req)
}
func withBroker(b Broker) Opt { func withBroker(b Broker) Opt {
return func(c *Core) { return func(c *Core) {
c.broker = b c.broker = b

View File

@ -76,6 +76,20 @@ func (_m *IMetaTable) AlterAlias(ctx context.Context, alias string, collectionNa
return r0 return r0
} }
// AlterCollection provides a mock function with given fields: ctx, oldColl, newColl, ts
func (_m *IMetaTable) AlterCollection(ctx context.Context, oldColl *model.Collection, newColl *model.Collection, ts uint64) error {
ret := _m.Called(ctx, oldColl, newColl, ts)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, *model.Collection, *model.Collection, uint64) error); ok {
r0 = rf(ctx, oldColl, newColl, ts)
} else {
r0 = ret.Error(0)
}
return r0
}
// AlterCredential provides a mock function with given fields: credInfo // AlterCredential provides a mock function with given fields: credInfo
func (_m *IMetaTable) AlterCredential(credInfo *internalpb.CredentialInfo) error { func (_m *IMetaTable) AlterCredential(credInfo *internalpb.CredentialInfo) error {
ret := _m.Called(credInfo) ret := _m.Called(credInfo)

View File

@ -883,7 +883,7 @@ func convertModelToDesc(collInfo *model.Collection, aliases []string) *milvuspb.
resp.Aliases = aliases resp.Aliases = aliases
resp.StartPositions = collInfo.StartPositions resp.StartPositions = collInfo.StartPositions
resp.CollectionName = resp.Schema.Name resp.CollectionName = resp.Schema.Name
resp.Properties = collInfo.Properties
return resp return resp
} }
@ -970,6 +970,55 @@ func (c *Core) ShowCollections(ctx context.Context, in *milvuspb.ShowCollections
return resp, nil return resp, nil
} }
func (c *Core) AlterCollection(ctx context.Context, in *milvuspb.AlterCollectionRequest) (*commonpb.Status, error) {
if code, ok := c.checkHealthy(); !ok {
return failStatus(commonpb.ErrorCode_UnexpectedError, "StateCode="+commonpb.StateCode_name[int32(code)]), nil
}
metrics.RootCoordDDLReqCounter.WithLabelValues("AlterCollection", metrics.TotalLabel).Inc()
tr := timerecord.NewTimeRecorder("AlterCollection")
log.Info("received request to alter collection", zap.String("role", typeutil.RootCoordRole),
zap.String("name", in.GetCollectionName()), zap.Int64("msgID", in.GetBase().GetMsgID()))
t := &alterCollectionTask{
baseTask: baseTask{
ctx: ctx,
core: c,
done: make(chan error, 1),
},
Req: in,
}
if err := c.scheduler.AddTask(t); err != nil {
log.Error("failed to enqueue request to alter collection", zap.String("role", typeutil.RootCoordRole),
zap.Error(err),
zap.String("name", in.GetCollectionName()), zap.Int64("msgID", in.GetBase().GetMsgID()))
metrics.RootCoordDDLReqCounter.WithLabelValues("AlterCollection", metrics.FailLabel).Inc()
return failStatus(commonpb.ErrorCode_UnexpectedError, err.Error()), nil
}
if err := t.WaitToFinish(); err != nil {
log.Error("failed to alter collection", zap.String("role", typeutil.RootCoordRole),
zap.Error(err),
zap.String("name", in.GetCollectionName()),
zap.Int64("msgID", in.GetBase().GetMsgID()), zap.Uint64("ts", t.GetTs()))
metrics.RootCoordDDLReqCounter.WithLabelValues("AlterCollection", metrics.FailLabel).Inc()
return failStatus(commonpb.ErrorCode_UnexpectedError, err.Error()), nil
}
metrics.RootCoordDDLReqCounter.WithLabelValues("AlterCollection", metrics.SuccessLabel).Inc()
metrics.RootCoordDDLReqLatency.WithLabelValues("AlterCollection").Observe(float64(tr.ElapseSpan().Milliseconds()))
metrics.RootCoordNumOfCollections.Dec()
log.Info("done to alter collection", zap.String("role", typeutil.RootCoordRole),
zap.String("name", in.GetCollectionName()), zap.Int64("msgID", in.GetBase().GetMsgID()),
zap.Uint64("ts", t.GetTs()))
return succStatus(), nil
}
// CreatePartition create partition // CreatePartition create partition
func (c *Core) CreatePartition(ctx context.Context, in *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) { func (c *Core) CreatePartition(ctx context.Context, in *milvuspb.CreatePartitionRequest) (*commonpb.Status, error) {
if code, ok := c.checkHealthy(); !ok { if code, ok := c.checkHealthy(); !ok {

View File

@ -1376,3 +1376,43 @@ func TestRootcoord_EnableActiveStandby(t *testing.T) {
err = core.Stop() err = core.Stop()
assert.NoError(t, err) assert.NoError(t, err)
} }
func TestRootCoord_AlterCollection(t *testing.T) {
t.Run("not healthy", func(t *testing.T) {
ctx := context.Background()
c := newTestCore(withAbnormalCode())
resp, err := c.AlterCollection(ctx, &milvuspb.AlterCollectionRequest{})
assert.NoError(t, err)
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.GetErrorCode())
})
t.Run("add task failed", func(t *testing.T) {
c := newTestCore(withHealthyCode(),
withInvalidScheduler())
ctx := context.Background()
resp, err := c.AlterCollection(ctx, &milvuspb.AlterCollectionRequest{})
assert.NoError(t, err)
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.GetErrorCode())
})
t.Run("execute task failed", func(t *testing.T) {
c := newTestCore(withHealthyCode(),
withTaskFailScheduler())
ctx := context.Background()
resp, err := c.AlterCollection(ctx, &milvuspb.AlterCollectionRequest{})
assert.NoError(t, err)
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.GetErrorCode())
})
t.Run("run ok", func(t *testing.T) {
c := newTestCore(withHealthyCode(),
withValidScheduler())
ctx := context.Background()
resp, err := c.AlterCollection(ctx, &milvuspb.AlterCollectionRequest{})
assert.NoError(t, err)
assert.Equal(t, commonpb.ErrorCode_Success, resp.GetErrorCode())
})
}

View File

@ -4,6 +4,8 @@ import (
"context" "context"
"fmt" "fmt"
"github.com/milvus-io/milvus/api/milvuspb"
pb "github.com/milvus-io/milvus/internal/proto/etcdpb" pb "github.com/milvus-io/milvus/internal/proto/etcdpb"
"github.com/milvus-io/milvus/internal/metastore/model" "github.com/milvus-io/milvus/internal/metastore/model"
@ -339,3 +341,36 @@ func (s *nullStep) Desc() string {
func (s *nullStep) Weight() stepPriority { func (s *nullStep) Weight() stepPriority {
return stepPriorityLow return stepPriorityLow
} }
type AlterCollectionStep struct {
baseStep
oldColl *model.Collection
newColl *model.Collection
ts Timestamp
}
func (a *AlterCollectionStep) Execute(ctx context.Context) ([]nestedStep, error) {
err := a.core.meta.AlterCollection(ctx, a.oldColl, a.newColl, a.ts)
return nil, err
}
func (a *AlterCollectionStep) Desc() string {
return fmt.Sprintf("alter collection, collectionID: %d, ts: %d", a.oldColl.CollectionID, a.ts)
}
type BroadcastAlteredCollectionStep struct {
baseStep
req *milvuspb.AlterCollectionRequest
core *Core
}
func (b *BroadcastAlteredCollectionStep) Execute(ctx context.Context) ([]nestedStep, error) {
	// TODO: support an online schema change mechanism.
	// For now this only broadcasts the altered collection properties to the DataCoord service.
err := b.core.broker.BroadCastAlteredCollection(ctx, b.req)
return nil, err
}
func (b *BroadcastAlteredCollectionStep) Desc() string {
return fmt.Sprintf("broadcast altered collection, collectionID: %d", b.req.CollectionID)
}

View File

@ -324,6 +324,8 @@ type DataCoord interface {
// MarkSegmentsDropped marks the given segments as `dropped` state. // MarkSegmentsDropped marks the given segments as `dropped` state.
MarkSegmentsDropped(ctx context.Context, req *datapb.MarkSegmentsDroppedRequest) (*commonpb.Status, error) MarkSegmentsDropped(ctx context.Context, req *datapb.MarkSegmentsDroppedRequest) (*commonpb.Status, error)
BroadCastAlteredCollection(ctx context.Context, req *milvuspb.AlterCollectionRequest) (*commonpb.Status, error)
} }
// DataCoordComponent defines the interface of DataCoord component. // DataCoordComponent defines the interface of DataCoord component.
@ -502,6 +504,16 @@ type RootCoord interface {
// error is always nil // error is always nil
ShowCollections(ctx context.Context, req *milvuspb.ShowCollectionsRequest) (*milvuspb.ShowCollectionsResponse, error) ShowCollections(ctx context.Context, req *milvuspb.ShowCollectionsRequest) (*milvuspb.ShowCollectionsResponse, error)
	// AlterCollection notifies RootCoord to alter a collection's properties
	//
	// ctx is the context to control request deadline and cancellation
	// req contains the request params, including database name(reserved), collection name and collection properties
	//
	// The `ErrorCode` of `Status` is `Success` if the collection is altered successfully;
	// otherwise, the `ErrorCode` of `Status` will be `Error`, and the `Reason` of `Status` will record the fail cause.
	// error is always nil
AlterCollection(ctx context.Context, request *milvuspb.AlterCollectionRequest) (*commonpb.Status, error)
// CreatePartition notifies RootCoord to create a partition // CreatePartition notifies RootCoord to create a partition
// //
// ctx is the context to control request deadline and cancellation // ctx is the context to control request deadline and cancellation
@ -871,7 +883,6 @@ type ProxyComponent interface {
// otherwise, the `ErrorCode` of `Status` will be `Error`, and the `Reason` of `Status` will record the fail cause. // otherwise, the `ErrorCode` of `Status` will be `Error`, and the `Reason` of `Status` will record the fail cause.
// error is always nil // error is always nil
CreateCollection(ctx context.Context, request *milvuspb.CreateCollectionRequest) (*commonpb.Status, error) CreateCollection(ctx context.Context, request *milvuspb.CreateCollectionRequest) (*commonpb.Status, error)
// DropCollection notifies Proxy to drop a collection // DropCollection notifies Proxy to drop a collection
// //
// ctx is the context to control request deadline and cancellation // ctx is the context to control request deadline and cancellation
@ -944,6 +955,16 @@ type ProxyComponent interface {
// error is always nil // error is always nil
ShowCollections(ctx context.Context, request *milvuspb.ShowCollectionsRequest) (*milvuspb.ShowCollectionsResponse, error) ShowCollections(ctx context.Context, request *milvuspb.ShowCollectionsRequest) (*milvuspb.ShowCollectionsResponse, error)
	// AlterCollection notifies Proxy to alter a collection
//
// ctx is the context to control request deadline and cancellation
// req contains the request params, including database name(reserved), collection name and collection properties
//
	// The `ErrorCode` of `Status` is `Success` if the collection is altered successfully;
// otherwise, the `ErrorCode` of `Status` will be `Error`, and the `Reason` of `Status` will record the fail cause.
// error is always nil
AlterCollection(ctx context.Context, request *milvuspb.AlterCollectionRequest) (*commonpb.Status, error)
// CreatePartition notifies Proxy to create a partition // CreatePartition notifies Proxy to create a partition
// //
// ctx is the context to control request deadline and cancellation // ctx is the context to control request deadline and cancellation

View File

@ -160,3 +160,8 @@ func (m *GrpcDataCoordClient) UnsetIsImportingState(context.Context, *datapb.Uns
func (m *GrpcDataCoordClient) MarkSegmentsDropped(context.Context, *datapb.MarkSegmentsDroppedRequest, ...grpc.CallOption) (*commonpb.Status, error) { func (m *GrpcDataCoordClient) MarkSegmentsDropped(context.Context, *datapb.MarkSegmentsDroppedRequest, ...grpc.CallOption) (*commonpb.Status, error) {
return &commonpb.Status{}, m.Err return &commonpb.Status{}, m.Err
} }
// BroadCastAlteredCollection is a mock implementation for tests: it ignores the
// request and returns an empty Status together with the client's injected error.
func (m *GrpcDataCoordClient) BroadCastAlteredCollection(ctx context.Context, in *milvuspb.AlterCollectionRequest, opts ...grpc.CallOption) (*commonpb.Status, error) {
	status := new(commonpb.Status)
	return status, m.Err
}

View File

@ -216,3 +216,7 @@ func (m *GrpcRootCoordClient) ListCredUsers(ctx context.Context, in *milvuspb.Li
func (m *GrpcRootCoordClient) GetCredential(ctx context.Context, in *rootcoordpb.GetCredentialRequest, opts ...grpc.CallOption) (*rootcoordpb.GetCredentialResponse, error) { func (m *GrpcRootCoordClient) GetCredential(ctx context.Context, in *rootcoordpb.GetCredentialRequest, opts ...grpc.CallOption) (*rootcoordpb.GetCredentialResponse, error) {
return &rootcoordpb.GetCredentialResponse{}, m.Err return &rootcoordpb.GetCredentialResponse{}, m.Err
} }
// AlterCollection is a mock implementation for tests: it ignores the request
// and returns an empty Status together with the client's injected error.
func (m *GrpcRootCoordClient) AlterCollection(ctx context.Context, in *milvuspb.AlterCollectionRequest, opts ...grpc.CallOption) (*commonpb.Status, error) {
	status := new(commonpb.Status)
	return status, m.Err
}

View File

@ -21,6 +21,7 @@ CREATE TABLE if not exists milvus_meta.collections (
start_position TEXT, start_position TEXT,
consistency_level INT, consistency_level INT,
status INT NOT NULL, status INT NOT NULL,
properties VARCHAR(512),
ts BIGINT UNSIGNED DEFAULT 0, ts BIGINT UNSIGNED DEFAULT 0,
is_deleted BOOL DEFAULT FALSE, is_deleted BOOL DEFAULT FALSE,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,