feat: add bulk insert support for Functions (#36715)

issues: https://github.com/milvus-io/milvus/issues/35853 and https://github.com/milvus-io/milvus/issues/35856

Signed-off-by: Buqian Zheng <zhengbuqian@gmail.com>

parent d45e2677b9
commit 82c5cf2fa2
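For orientation, here is a minimal sketch of the schema shape this commit supports during bulk insert, built from the milvus-proto Go types used throughout the diff (the module path is assumed): a VarChar field feeds a BM25 function, and the sparse output field is generated server-side, so import files must not supply it.

```go
package main

import (
	"fmt"

	"github.com/milvus-io/milvus-proto/go-api/v2/schemapb"
)

func main() {
	schema := &schemapb.CollectionSchema{
		Name: "docs",
		Fields: []*schemapb.FieldSchema{
			{FieldID: 100, Name: "pk", DataType: schemapb.DataType_Int64, IsPrimaryKey: true, AutoID: true},
			{FieldID: 102, Name: "text", DataType: schemapb.DataType_VarChar},
			// Function output: populated by the BM25 function, never by import files.
			{FieldID: 103, Name: "sparse", DataType: schemapb.DataType_SparseFloatVector, IsFunctionOutput: true},
		},
		Functions: []*schemapb.FunctionSchema{{
			Id:               1000,
			Name:             "bm25",
			Type:             schemapb.FunctionType_BM25,
			InputFieldIds:    []int64{102},
			InputFieldNames:  []string{"text"},
			OutputFieldIds:   []int64{103},
			OutputFieldNames: []string{"sparse"},
		}},
	}
	fmt.Println(len(schema.GetFunctions())) // 1
}
```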
@@ -33,7 +33,7 @@ func newHashedData(schema *schemapb.CollectionSchema, channelNum, partitionNum i
 	for i := 0; i < channelNum; i++ {
 		res[i] = make([]*storage.InsertData, partitionNum)
 		for j := 0; j < partitionNum; j++ {
-			res[i][j], err = storage.NewInsertData(schema)
+			res[i][j], err = storage.NewInsertDataWithFunctionOutputField(schema)
 			if err != nil {
 				return nil, err
 			}
@@ -195,6 +195,12 @@ func (t *ImportTask) importFile(reader importutilv2.Reader) error {
 		if err != nil {
 			return err
 		}
+		if !importutilv2.IsBackup(t.req.GetOptions()) {
+			err = RunEmbeddingFunction(t, data)
+			if err != nil {
+				return err
+			}
+		}
 		hashedData, err := HashData(t, data)
 		if err != nil {
 			return err
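The hunk above adds one branch to importFile: function outputs are computed at import time unless the import is a backup restore, where the output columns already exist in the backup files. A toy, self-contained sketch of that control flow (the types and printed steps here are illustrative stand-ins, not the real helpers):

```go
package main

import "fmt"

// insertData is a stand-in for *storage.InsertData.
type insertData struct{ rows int }

// importFile sketches the new branch: run functions only for fresh imports.
func importFile(data *insertData, isBackup bool) error {
	if !isBackup {
		// corresponds to RunEmbeddingFunction(t, data) in the diff
		fmt.Println("computing function output fields for", data.rows, "rows")
	}
	// corresponds to HashData + sync in the diff
	fmt.Println("hashing and syncing", data.rows, "rows")
	return nil
}

func main() {
	_ = importFile(&insertData{rows: 10}, false) // fresh import: functions run
	_ = importFile(&insertData{rows: 10}, true)  // backup restore: functions skipped
}
```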
@@ -236,8 +242,17 @@ func (t *ImportTask) sync(hashedData HashedData) ([]*conc.Future[struct{}], []sy
 		if err != nil {
 			return nil, nil, err
 		}
+		bm25Stats := make(map[int64]*storage.BM25Stats)
+		for _, fn := range t.req.GetSchema().GetFunctions() {
+			if fn.GetType() == schemapb.FunctionType_BM25 {
+				// BM25 function guarantees single output field
+				outputSparseFieldId := fn.GetOutputFieldIds()[0]
+				bm25Stats[outputSparseFieldId] = storage.NewBM25Stats()
+				bm25Stats[outputSparseFieldId].AppendFieldData(data.Data[outputSparseFieldId].(*storage.SparseFloatVectorFieldData))
+			}
+		}
 		syncTask, err := NewSyncTask(t.ctx, t.allocator, t.metaCaches, t.req.GetTs(),
-			segmentID, partitionID, t.GetCollectionID(), channel, data, nil)
+			segmentID, partitionID, t.GetCollectionID(), channel, data, nil, bm25Stats)
 		if err != nil {
 			return nil, nil, err
 		}
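Here the sync path starts carrying BM25 statistics: one accumulator per BM25 output field, keyed by that field's ID and fed from the computed sparse column before the segment syncs. A self-contained sketch with a stand-in Stats type (storage.BM25Stats lives in a Milvus-internal package, and the real type tracks term frequencies and document lengths, not just row counts):

```go
package main

import "fmt"

// Stats is a simplified stand-in for storage.BM25Stats.
type Stats struct{ numRows int64 }

func (s *Stats) AppendFieldData(rows []map[uint32]float32) { s.numRows += int64(len(rows)) }

func main() {
	bm25Stats := make(map[int64]*Stats)
	outputSparseFieldID := int64(103) // the BM25 function's single output field
	bm25Stats[outputSparseFieldID] = &Stats{}
	// Feed the computed sparse rows into the accumulator before sync.
	bm25Stats[outputSparseFieldID].AppendFieldData([]map[uint32]float32{
		{7: 1.0, 42: 2.0},
		{7: 3.0},
	})
	fmt.Println(bm25Stats[outputSparseFieldID].numRows) // 2
}
```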
@@ -227,7 +227,7 @@ func (t *L0ImportTask) syncDelete(delData []*storage.DeleteData) ([]*conc.Future
 		return nil, nil, err
 	}
 	syncTask, err := NewSyncTask(t.ctx, t.allocator, t.metaCaches, t.req.GetTs(),
-		segmentID, partitionID, t.GetCollectionID(), channel, nil, data)
+		segmentID, partitionID, t.GetCollectionID(), channel, nil, data, nil)
 	if err != nil {
 		return nil, nil, err
 	}
@@ -34,6 +34,7 @@ import (
 	"github.com/milvus-io/milvus/internal/flushcommon/syncmgr"
 	"github.com/milvus-io/milvus/internal/proto/datapb"
 	"github.com/milvus-io/milvus/internal/storage"
+	"github.com/milvus-io/milvus/internal/util/function"
 	"github.com/milvus-io/milvus/pkg/common"
 	"github.com/milvus-io/milvus/pkg/log"
 	"github.com/milvus-io/milvus/pkg/metrics"
@@ -52,6 +53,7 @@ func NewSyncTask(ctx context.Context,
 	segmentID, partitionID, collectionID int64, vchannel string,
 	insertData *storage.InsertData,
 	deleteData *storage.DeleteData,
+	bm25Stats map[int64]*storage.BM25Stats,
 ) (syncmgr.Task, error) {
 	metaCache := metaCaches[vchannel]
 	if _, ok := metaCache.GetSegmentByID(segmentID); !ok {
@@ -94,6 +96,9 @@ func NewSyncTask(ctx context.Context,
 		WithLevel(segmentLevel).
 		WithDataSource(metrics.BulkinsertDataSourceLabel).
 		WithBatchSize(int64(insertData.GetRowNum()))
+	if bm25Stats != nil {
+		syncPack.WithBM25Stats(bm25Stats)
+	}
 
 	return serializer.EncodeBuffer(ctx, syncPack)
 }
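The nil guard above keeps sync packs for function-free schemas unchanged. A runnable sketch of that optional-builder pattern, with stand-in types:

```go
package main

import "fmt"

// syncPack mimics the builder used by NewSyncTask; WithBM25Stats is only
// chained in when stats were actually collected.
type syncPack struct {
	batchSize int64
	bm25Stats map[int64]int64 // stand-in for map[int64]*storage.BM25Stats
}

func (p *syncPack) WithBatchSize(n int64) *syncPack          { p.batchSize = n; return p }
func (p *syncPack) WithBM25Stats(s map[int64]int64) *syncPack { p.bm25Stats = s; return p }

func buildPack(rows int64, stats map[int64]int64) *syncPack {
	pack := (&syncPack{}).WithBatchSize(rows)
	if stats != nil { // mirrors the nil guard added to NewSyncTask
		pack.WithBM25Stats(stats)
	}
	return pack
}

func main() {
	fmt.Println(buildPack(128, nil).bm25Stats == nil)                     // true
	fmt.Println(buildPack(128, map[int64]int64{103: 2}).bm25Stats == nil) // false
}
```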
@@ -202,6 +207,49 @@ func AppendSystemFieldsData(task *ImportTask, data *storage.InsertData) error {
 	return nil
 }
 
+func RunEmbeddingFunction(task *ImportTask, data *storage.InsertData) error {
+	fns := task.GetSchema().GetFunctions()
+	for _, fn := range fns {
+		runner, err := function.NewFunctionRunner(task.GetSchema(), fn)
+		if err != nil {
+			return err
+		}
+		inputDatas := make([]any, 0, len(fn.InputFieldIds))
+		for _, inputFieldID := range fn.InputFieldIds {
+			inputDatas = append(inputDatas, data.Data[inputFieldID].GetDataRows())
+		}
+		outputFieldData, err := runner.BatchRun(inputDatas...)
+		if err != nil {
+			return err
+		}
+		for i, outputFieldID := range fn.OutputFieldIds {
+			outputField := typeutil.GetField(task.GetSchema(), outputFieldID)
+			// TODO: only vector output fields are supported; scalar output fields are not supported yet
+			switch outputField.GetDataType() {
+			case schemapb.DataType_FloatVector:
+				data.Data[outputFieldID] = outputFieldData[i].(*storage.FloatVectorFieldData)
+			case schemapb.DataType_BFloat16Vector:
+				data.Data[outputFieldID] = outputFieldData[i].(*storage.BFloat16VectorFieldData)
+			case schemapb.DataType_Float16Vector:
+				data.Data[outputFieldID] = outputFieldData[i].(*storage.Float16VectorFieldData)
+			case schemapb.DataType_BinaryVector:
+				data.Data[outputFieldID] = outputFieldData[i].(*storage.BinaryVectorFieldData)
+			case schemapb.DataType_SparseFloatVector:
+				sparseArray := outputFieldData[i].(*schemapb.SparseFloatArray)
+				data.Data[outputFieldID] = &storage.SparseFloatVectorFieldData{
+					SparseFloatArray: schemapb.SparseFloatArray{
+						Dim:      sparseArray.GetDim(),
+						Contents: sparseArray.GetContents(),
+					},
+				}
+			default:
+				return fmt.Errorf("unsupported output data type for embedding function: %s", outputField.GetDataType().String())
+			}
+		}
+	}
+	return nil
+}
+
 func GetInsertDataRowCount(data *storage.InsertData, schema *schemapb.CollectionSchema) int {
 	fields := lo.KeyBy(schema.GetFields(), func(field *schemapb.FieldSchema) int64 {
 		return field.GetFieldID()
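RunEmbeddingFunction follows a gather/batch/scatter shape: collect the input columns named by the function schema, make one batched call, then store each output column under its output field ID. A self-contained sketch of that shape; FunctionRunner here is a stand-in interface for the internal function.FunctionRunner, and the toy runner just maps strings to their lengths:

```go
package main

import "fmt"

// FunctionRunner is a stand-in for internal/util/function.FunctionRunner.
type FunctionRunner interface {
	BatchRun(inputs ...any) ([]any, error)
}

// lenRunner is a toy runner: one int "embedding" (the string length) per row.
type lenRunner struct{}

func (lenRunner) BatchRun(inputs ...any) ([]any, error) {
	texts := inputs[0].([]string)
	out := make([]int, len(texts))
	for i, t := range texts {
		out[i] = len(t)
	}
	return []any{out}, nil
}

func main() {
	// Columns keyed by field ID, like data.Data in the diff.
	data := map[int64]any{102: []string{"hello world", "milvus"}}
	inputFieldIDs := []int64{102}
	outputFieldIDs := []int64{103}

	// Gather inputs, run one batched call, scatter outputs.
	var runner FunctionRunner = lenRunner{}
	inputs := make([]any, 0, len(inputFieldIDs))
	for _, id := range inputFieldIDs {
		inputs = append(inputs, data[id])
	}
	outputs, err := runner.BatchRun(inputs...)
	if err != nil {
		panic(err)
	}
	for i, id := range outputFieldIDs {
		data[id] = outputs[i]
	}
	fmt.Println(data[103]) // [11 6]
}
```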
@@ -50,10 +50,14 @@ type InsertData struct {
 }
 
 func NewInsertData(schema *schemapb.CollectionSchema) (*InsertData, error) {
-	return NewInsertDataWithCap(schema, 0)
+	return NewInsertDataWithCap(schema, 0, false)
 }
 
-func NewInsertDataWithCap(schema *schemapb.CollectionSchema, cap int) (*InsertData, error) {
+func NewInsertDataWithFunctionOutputField(schema *schemapb.CollectionSchema) (*InsertData, error) {
+	return NewInsertDataWithCap(schema, 0, true)
+}
+
+func NewInsertDataWithCap(schema *schemapb.CollectionSchema, cap int, withFunctionOutput bool) (*InsertData, error) {
 	if schema == nil {
 		return nil, merr.WrapErrParameterMissing("collection schema")
 	}
@@ -69,6 +73,14 @@ func NewInsertDataWithCap(schema *schemapb.CollectionSchema, cap int) (*InsertDa
 		if field.IsPartitionKey && field.GetNullable() {
 			return nil, merr.WrapErrParameterInvalidMsg("partition key field not support nullable")
 		}
+		if field.IsFunctionOutput {
+			if field.IsPrimaryKey || field.IsPartitionKey {
+				return nil, merr.WrapErrParameterInvalidMsg("function output field should not be primary key or partition key")
+			}
+			if !withFunctionOutput {
+				continue
+			}
+		}
 		fieldData, err := NewFieldData(field.DataType, field, cap)
 		if err != nil {
 			return nil, err
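The withFunctionOutput flag above decides whether buffers are allocated for function output fields: the plain constructor skips them (user files never contain them), while the import path allocates them so computed outputs have somewhere to land. A minimal sketch with a stand-in Field type:

```go
package main

import "fmt"

// Field is a stand-in for schemapb.FieldSchema.
type Field struct {
	Name             string
	IsFunctionOutput bool
}

// newBuffers mirrors the loop in NewInsertDataWithCap: skip function output
// fields unless the caller explicitly asked for them.
func newBuffers(fields []Field, withFunctionOutput bool) []string {
	allocated := make([]string, 0, len(fields))
	for _, f := range fields {
		if f.IsFunctionOutput && !withFunctionOutput {
			continue
		}
		allocated = append(allocated, f.Name)
	}
	return allocated
}

func main() {
	fields := []Field{{Name: "text"}, {Name: "sparse", IsFunctionOutput: true}}
	fmt.Println(newBuffers(fields, false)) // [text]        — NewInsertData
	fmt.Println(newBuffers(fields, true))  // [text sparse] — NewInsertDataWithFunctionOutputField
}
```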
@@ -114,5 +114,6 @@ func verify(schema *schemapb.CollectionSchema, insertLogs map[int64][]string) er
 				fieldID, len(logs), common.RowIDField, len(insertLogs[common.RowIDField])))
 		}
 	}
+	// for Function output field, we do not re-run the Function when restoring from a backup.
 	return nil
 }
@@ -77,6 +77,24 @@ func (suite *ReaderSuite) run(dataType schemapb.DataType, elemType schemapb.Data
 			},
 		},
 	}
+	if dataType == schemapb.DataType_VarChar {
+		// Add a BM25 function if data type is VarChar
+		schema.Fields = append(schema.Fields, &schemapb.FieldSchema{
+			FieldID:          103,
+			Name:             "sparse",
+			DataType:         schemapb.DataType_SparseFloatVector,
+			IsFunctionOutput: true,
+		})
+		schema.Functions = append(schema.Functions, &schemapb.FunctionSchema{
+			Id:               1000,
+			Name:             "bm25",
+			Type:             schemapb.FunctionType_BM25,
+			InputFieldIds:    []int64{102},
+			InputFieldNames:  []string{dataType.String()},
+			OutputFieldIds:   []int64{103},
+			OutputFieldNames: []string{"sparse"},
+		})
+	}
 
 	// config
 	// csv separator
@@ -26,10 +26,20 @@ type rowParser struct {
 }
 
 func NewRowParser(schema *schemapb.CollectionSchema, header []string, nullkey string) (RowParser, error) {
-	name2Field := lo.KeyBy(schema.GetFields(),
-		func(field *schemapb.FieldSchema) string {
-			return field.GetName()
-		})
+	pkField, err := typeutil.GetPrimaryFieldSchema(schema)
+	if err != nil {
+		return nil, err
+	}
+	dynamicField := typeutil.GetDynamicField(schema)
+
+	name2Field := lo.SliceToMap(
+		lo.Filter(schema.GetFields(), func(field *schemapb.FieldSchema, _ int) bool {
+			return !field.GetIsFunctionOutput() && !typeutil.IsAutoPKField(field) && field.GetName() != dynamicField.GetName()
+		}),
+		func(field *schemapb.FieldSchema) (string, *schemapb.FieldSchema) {
+			return field.GetName(), field
+		},
+	)
 
 	name2Dim := make(map[string]int)
 	for name, field := range name2Field {
@@ -42,20 +52,6 @@ func NewRowParser(schema *schemapb.CollectionSchema, header []string, nullkey st
 		}
 	}
-
-	pkField, err := typeutil.GetPrimaryFieldSchema(schema)
-	if err != nil {
-		return nil, err
-	}
-
-	if pkField.GetAutoID() {
-		delete(name2Field, pkField.GetName())
-	}
-
-	dynamicField := typeutil.GetDynamicField(schema)
-	if dynamicField != nil {
-		delete(name2Field, dynamicField.GetName())
-	}
 
 	// check if csv header provides the primary key while it should be auto-generated
 	if pkField.GetAutoID() && lo.Contains(header, pkField.GetName()) {
 		return nil, merr.WrapErrImportFailed(
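Both parser hunks swap post-hoc map deletes for an up-front filter: function outputs, auto-generated primary keys, and the dynamic field never enter the name-to-field map at all. A runnable sketch of the same lo.Filter + lo.SliceToMap pattern using github.com/samber/lo:

```go
package main

import (
	"fmt"

	"github.com/samber/lo"
)

type field struct {
	Name             string
	AutoID           bool
	IsFunctionOutput bool
}

func main() {
	fields := []field{
		{Name: "pk", AutoID: true},
		{Name: "text"},
		{Name: "sparse", IsFunctionOutput: true},
	}
	// Build the lookup map only from fields a user row may legally contain.
	name2Field := lo.SliceToMap(
		lo.Filter(fields, func(f field, _ int) bool {
			return !f.AutoID && !f.IsFunctionOutput
		}),
		func(f field) (string, field) { return f.Name, f },
	)
	fmt.Println(len(name2Field)) // 1: only "text" survives the filter
}
```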
@@ -101,6 +101,25 @@ func (suite *ReaderSuite) run(dataType schemapb.DataType, elemType schemapb.Data
 		},
 	}
 
+	if dataType == schemapb.DataType_VarChar {
+		// Add a BM25 function if data type is VarChar
+		schema.Fields = append(schema.Fields, &schemapb.FieldSchema{
+			FieldID:          103,
+			Name:             "sparse",
+			DataType:         schemapb.DataType_SparseFloatVector,
+			IsFunctionOutput: true,
+		})
+		schema.Functions = append(schema.Functions, &schemapb.FunctionSchema{
+			Id:               1000,
+			Name:             "bm25",
+			Type:             schemapb.FunctionType_BM25,
+			InputFieldIds:    []int64{102},
+			InputFieldNames:  []string{dataType.String()},
+			OutputFieldIds:   []int64{103},
+			OutputFieldNames: []string{"sparse"},
+		})
+	}
+
 	insertData, err := testutil.CreateInsertData(schema, suite.numRows)
 	suite.NoError(err)
@@ -62,20 +62,17 @@ func NewRowParser(schema *schemapb.CollectionSchema) (RowParser, error) {
 	if err != nil {
 		return nil, err
 	}
-	name2FieldID := lo.SliceToMap(schema.GetFields(),
-		func(field *schemapb.FieldSchema) (string, int64) {
-			return field.GetName(), field.GetFieldID()
-		})
-
-	if pkField.GetAutoID() {
-		delete(name2FieldID, pkField.GetName())
-	}
-
-	dynamicField := typeutil.GetDynamicField(schema)
-	if dynamicField != nil {
-		delete(name2FieldID, dynamicField.GetName())
-	}
+	dynamicField := typeutil.GetDynamicField(schema)
+
+	name2FieldID := lo.SliceToMap(
+		lo.Filter(schema.GetFields(), func(field *schemapb.FieldSchema, _ int) bool {
+			return !field.GetIsFunctionOutput() && !typeutil.IsAutoPKField(field) && field.GetName() != dynamicField.GetName()
+		}),
+		func(field *schemapb.FieldSchema) (string, int64) {
+			return field.GetName(), field.GetFieldID()
+		},
+	)
 	return &rowParser{
 		id2Dim:   id2Dim,
 		id2Field: id2Field,
@@ -132,24 +132,32 @@ func CreateReaders(ctx context.Context, cm storage.ChunkManager, schema *schemap
 		return name, path
 	})
 	for _, field := range schema.GetFields() {
+		path, hasPath := nameToPath[field.GetName()]
 		if field.GetIsPrimaryKey() && field.GetAutoID() {
-			if _, ok := nameToPath[field.GetName()]; ok {
+			if hasPath {
 				return nil, merr.WrapErrImportFailed(
 					fmt.Sprintf("the primary key '%s' is auto-generated, no need to provide", field.GetName()))
 			}
 			continue
 		}
-		if _, ok := nameToPath[field.GetName()]; !ok {
+		if field.GetIsFunctionOutput() {
+			if hasPath {
+				return nil, merr.WrapErrImportFailed(
+					fmt.Sprintf("field %s is Function output, should not be provided. Provided files: %v", field.GetName(), lo.Values(nameToPath)))
+			}
+			continue
+		}
+		if !hasPath {
 			if field.GetIsDynamic() {
 				continue
 			}
 			return nil, merr.WrapErrImportFailed(
 				fmt.Sprintf("no file for field: %s, files: %v", field.GetName(), lo.Values(nameToPath)))
 		}
-		reader, err := cm.Reader(ctx, nameToPath[field.GetName()])
+		reader, err := cm.Reader(ctx, path)
 		if err != nil {
 			return nil, merr.WrapErrImportFailed(
-				fmt.Sprintf("failed to read the file '%s', error: %s", nameToPath[field.GetName()], err.Error()))
+				fmt.Sprintf("failed to read the file '%s', error: %s", path, err.Error()))
 		}
 		readers[field.GetFieldID()] = reader
 	}
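The numpy reader now enforces three cases per field: auto-ID primary keys and function outputs must not have a backing .npy file, and every other non-dynamic field must. A pure-Go sketch of that validation with stand-in types (no chunk manager involved):

```go
package main

import "fmt"

// field is a stand-in for the relevant schemapb.FieldSchema flags.
type field struct {
	Name           string
	AutoPK         bool
	FunctionOutput bool
	Dynamic        bool
}

func validate(fields []field, nameToPath map[string]string) error {
	for _, f := range fields {
		_, hasPath := nameToPath[f.Name]
		switch {
		case f.AutoPK:
			if hasPath {
				return fmt.Errorf("the primary key '%s' is auto-generated, no need to provide", f.Name)
			}
		case f.FunctionOutput:
			if hasPath {
				return fmt.Errorf("field %s is Function output, should not be provided", f.Name)
			}
		case !hasPath && !f.Dynamic:
			return fmt.Errorf("no file for field: %s", f.Name)
		}
	}
	return nil
}

func main() {
	fields := []field{
		{Name: "pk", AutoPK: true},
		{Name: "text"},
		{Name: "sparse", FunctionOutput: true},
	}
	fmt.Println(validate(fields, map[string]string{"text": "text.npy"}))
	// <nil>
	fmt.Println(validate(fields, map[string]string{"text": "text.npy", "sparse": "sparse.npy"}))
	// field sparse is Function output, should not be provided
}
```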
@@ -111,6 +111,26 @@ func (suite *ReaderSuite) run(dt schemapb.DataType) {
 			},
 		},
 	}
 
+	if dt == schemapb.DataType_VarChar {
+		// Add a BM25 function if data type is VarChar
+		schema.Fields = append(schema.Fields, &schemapb.FieldSchema{
+			FieldID:          103,
+			Name:             "sparse",
+			DataType:         schemapb.DataType_SparseFloatVector,
+			IsFunctionOutput: true,
+		})
+		schema.Functions = append(schema.Functions, &schemapb.FunctionSchema{
+			Id:               1000,
+			Name:             "bm25",
+			Type:             schemapb.FunctionType_BM25,
+			InputFieldIds:    []int64{102},
+			InputFieldNames:  []string{dt.String()},
+			OutputFieldIds:   []int64{103},
+			OutputFieldNames: []string{"sparse"},
+		})
+	}
+
 	insertData, err := testutil.CreateInsertData(schema, suite.numRows)
 	suite.NoError(err)
 	fieldIDToField := lo.KeyBy(schema.GetFields(), func(field *schemapb.FieldSchema) int64 {
@@ -118,6 +138,9 @@ func (suite *ReaderSuite) run(dt schemapb.DataType) {
 	})
 	files := make(map[int64]string)
 	for _, field := range schema.GetFields() {
+		if field.GetIsFunctionOutput() {
+			continue
+		}
 		files[field.GetFieldID()] = fmt.Sprintf("%s.npy", field.GetName())
 	}
@@ -452,4 +475,19 @@ func TestCreateReaders(t *testing.T) {
 	}
 	_, err = CreateReaders(ctx, cm, schema, []string{"pk", "vec"})
 	assert.NoError(t, err)
+
+	// auto id and Function
+	schema = &schemapb.CollectionSchema{
+		Fields: []*schemapb.FieldSchema{
+			{Name: "pk", DataType: schemapb.DataType_Int64, IsPrimaryKey: true, AutoID: true},
+			{Name: "vec", DataType: schemapb.DataType_FloatVector},
+			{Name: "text", DataType: schemapb.DataType_VarChar},
+			{Name: "sparse", DataType: schemapb.DataType_SparseFloatVector, IsFunctionOutput: true},
+		},
+		Functions: []*schemapb.FunctionSchema{
+			{Name: "bm25", InputFieldNames: []string{"text"}, OutputFieldNames: []string{"sparse"}},
+		},
+	}
+	_, err = CreateReaders(ctx, cm, schema, []string{"vec", "text"})
+	assert.NoError(t, err)
 }
@@ -146,6 +146,24 @@ func (s *ReaderSuite) run(dataType schemapb.DataType, elemType schemapb.DataType
 			},
 		},
 	}
+	if dataType == schemapb.DataType_VarChar {
+		// Add a BM25 function if data type is VarChar
+		schema.Fields = append(schema.Fields, &schemapb.FieldSchema{
+			FieldID:          103,
+			Name:             "sparse",
+			DataType:         schemapb.DataType_SparseFloatVector,
+			IsFunctionOutput: true,
+		})
+		schema.Functions = append(schema.Functions, &schemapb.FunctionSchema{
+			Id:               1000,
+			Name:             "bm25",
+			Type:             schemapb.FunctionType_BM25,
+			InputFieldIds:    []int64{102},
+			InputFieldNames:  []string{dataType.String()},
+			OutputFieldIds:   []int64{103},
+			OutputFieldNames: []string{"sparse"},
+		})
+	}
 
 	filePath := fmt.Sprintf("/tmp/test_%d_reader.parquet", rand.Int())
 	defer os.Remove(filePath)
@@ -82,7 +82,7 @@ func CreateFieldReaders(ctx context.Context, fileReader *pqarrow.FileReader, sch
 	}
 
 	for _, field := range nameToField {
-		if typeutil.IsAutoPKField(field) || field.GetIsDynamic() {
+		if typeutil.IsAutoPKField(field) || field.GetIsDynamic() || field.GetIsFunctionOutput() {
 			continue
 		}
 		if _, ok := crs[field.GetFieldID()]; !ok {
@@ -206,10 +206,12 @@ func convertToArrowDataType(field *schemapb.FieldSchema, isArray bool) (arrow.Da
 	}
 }
 
+// This method is used only by import util and related tests. Returned arrow.Schema
+// doesn't include function output fields.
 func ConvertToArrowSchema(schema *schemapb.CollectionSchema, useNullType bool) (*arrow.Schema, error) {
 	arrFields := make([]arrow.Field, 0)
 	for _, field := range schema.GetFields() {
-		if typeutil.IsAutoPKField(field) {
+		if typeutil.IsAutoPKField(field) || field.GetIsFunctionOutput() {
 			continue
 		}
 		arrDataType, err := convertToArrowDataType(field, false)
@@ -234,7 +236,7 @@ func isSchemaEqual(schema *schemapb.CollectionSchema, arrSchema *arrow.Schema) e
 		return field.Name
 	})
 	for _, field := range schema.GetFields() {
-		if typeutil.IsAutoPKField(field) {
+		if typeutil.IsAutoPKField(field) || field.GetIsFunctionOutput() {
 			continue
 		}
 		arrField, ok := arrNameToField[field.GetName()]
@@ -109,7 +109,7 @@ func CreateInsertData(schema *schemapb.CollectionSchema, rows int, nullPercent .
 		return nil, err
 	}
 	for _, f := range schema.GetFields() {
-		if f.GetAutoID() {
+		if f.GetAutoID() || f.IsFunctionOutput {
 			continue
 		}
 		switch f.GetDataType() {
@@ -213,7 +213,7 @@ func BuildArrayData(schema *schemapb.CollectionSchema, insertData *storage.Inser
 	mem := memory.NewGoAllocator()
 	columns := make([]arrow.Array, 0, len(schema.Fields))
 	for _, field := range schema.Fields {
-		if field.GetIsPrimaryKey() && field.GetAutoID() {
+		if field.GetIsPrimaryKey() && field.GetAutoID() || field.GetIsFunctionOutput() {
 			continue
 		}
 		fieldID := field.GetFieldID()
@@ -531,7 +531,7 @@ func CreateInsertDataRowsForJSON(schema *schemapb.CollectionSchema, insertData *
 		field := fieldIDToField[fieldID]
 		dataType := field.GetDataType()
 		elemType := field.GetElementType()
-		if field.GetAutoID() {
+		if field.GetAutoID() || field.IsFunctionOutput {
 			continue
 		}
 		if v.GetRow(i) == nil {
@@ -590,11 +590,12 @@ func CreateInsertDataForCSV(schema *schemapb.CollectionSchema, insertData *stora
 	csvData := make([][]string, 0, rowNum+1)
 
 	header := make([]string, 0)
-	nameToFields := lo.KeyBy(schema.GetFields(), func(field *schemapb.FieldSchema) string {
+	fields := lo.Filter(schema.GetFields(), func(field *schemapb.FieldSchema, _ int) bool {
+		return !field.GetAutoID() && !field.IsFunctionOutput
+	})
+	nameToFields := lo.KeyBy(fields, func(field *schemapb.FieldSchema) string {
 		name := field.GetName()
-		if !field.GetAutoID() {
-			header = append(header, name)
-		}
+		header = append(header, name)
 		return name
 	})
 	csvData = append(csvData, header)
@@ -606,9 +607,6 @@ func CreateInsertDataForCSV(schema *schemapb.CollectionSchema, insertData *stora
 			value := insertData.Data[field.FieldID]
 			dataType := field.GetDataType()
 			elemType := field.GetElementType()
-			if field.GetAutoID() {
-				continue
-			}
 			// deal with null value
 			if field.GetNullable() && value.GetRow(i) == nil {
 				data = append(data, nullkey)
@@ -24,6 +24,7 @@ import (
 	"math/rand"
 	"sort"
 	"strconv"
+	"strings"
 
 	"github.com/x448/float16"
@@ -109,8 +110,21 @@ func GenerateVarCharArray(numRows int, maxLen int) []string {
 
 func GenerateStringArray(numRows int) []string {
 	ret := make([]string, 0, numRows)
+
+	genSentence := func() string {
+		words := []string{"hello", "world", "this", "is", "a", "test", "sentence", "milvus", "vector", "database", "search", "engine", "fast", "efficient", "scalable"}
+		selectedWords := make([]string, rand.Intn(6)+5) // 5 to 10 words
+		for i := range selectedWords {
+			selectedWords[i] = words[rand.Intn(len(words))]
+		}
+		rand.Shuffle(len(selectedWords), func(i, j int) {
+			selectedWords[i], selectedWords[j] = selectedWords[j], selectedWords[i]
+		})
+		return strings.Join(selectedWords, " ")
+	}
+
 	for i := 0; i < numRows; i++ {
-		ret = append(ret, strconv.Itoa(i))
+		ret = append(ret, genSentence())
 	}
 	return ret
 }
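The test helper now emits multi-word sentences instead of integer strings, giving BM25 tokenization realistic input. A standalone version, seeded for reproducibility (the word list here is abbreviated):

```go
package main

import (
	"fmt"
	"math/rand"
	"strings"
)

// genSentence joins 5-10 random words, mirroring the helper in the diff.
func genSentence(r *rand.Rand) string {
	words := []string{"hello", "world", "milvus", "vector", "database", "search"}
	selected := make([]string, r.Intn(6)+5) // 5 to 10 words
	for i := range selected {
		selected[i] = words[r.Intn(len(words))]
	}
	return strings.Join(selected, " ")
}

func main() {
	r := rand.New(rand.NewSource(42)) // seeded so runs are reproducible
	for i := 0; i < 3; i++ {
		fmt.Println(genSentence(r))
	}
}
```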