enhance: [2.5] Add UTF-8 string validation for import (#40746)
issue: https://github.com/milvus-io/milvus/issues/40684
pr: https://github.com/milvus-io/milvus/pull/40694

Signed-off-by: bigsheeper <yihao.dai@zilliz.com>
This commit is contained in:
parent 753f8c8410
commit cba8feade3
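For orientation before the hunks below: every import path that yields varchar data now runs a UTF-8 validity check before the existing max_length check. The following is a minimal standalone sketch of that ordering, not part of the patch; validateVarchar and the field name "text" are invented for illustration, while the real helpers (CheckValidUTF8, CheckVarcharLength) appear in the first two hunks.

    package main

    import (
        "fmt"
        "unicode/utf8"
    )

    // validateVarchar mirrors the order of checks the readers apply after this patch:
    // reject non-UTF-8 bytes first, then enforce the schema's max_length.
    func validateVarchar(value string, maxLength int64, fieldName string) error {
        if !utf8.ValidString(value) {
            return fmt.Errorf("field %s contains invalid UTF-8 data, value=%s", fieldName, value)
        }
        if int64(len(value)) > maxLength {
            return fmt.Errorf("value length(%d) for field %s exceeds max_length(%d)", len(value), fieldName, maxLength)
        }
        return nil
    }

    func main() {
        fmt.Println(validateVarchar("hello", 16, "text"))    // <nil>
        fmt.Println(validateVarchar("\xc3\x28", 16, "text")) // invalid UTF-8 error
    }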
@@ -61,11 +61,7 @@ func getInsertDataRowNum(data *storage.InsertData, schema *schemapb.CollectionSc
     return 0
 }
 
-func CheckVarcharLength(data any, maxLength int64, field *schemapb.FieldSchema) error {
-    str, ok := data.(string)
-    if !ok {
-        return fmt.Errorf("expected string type for field %s, but got %T", field.GetName(), data)
-    }
+func CheckVarcharLength(str string, maxLength int64, field *schemapb.FieldSchema) error {
     if (int64)(len(str)) > maxLength {
         return fmt.Errorf("value length(%d) for field %s exceeds max_length(%d)", len(str), field.GetName(), maxLength)
     }
@@ -96,3 +92,10 @@ func EstimateReadCountPerBatch(bufferSize int, schema *schemapb.CollectionSchema
     }
     return ret, nil
 }
+
+func CheckValidUTF8(s string, field *schemapb.FieldSchema) error {
+    if !typeutil.IsUTF8(s) {
+        return fmt.Errorf("field %s contains invalid UTF-8 data, value=%s", field.GetName(), s)
+    }
+    return nil
+}
@@ -241,6 +241,9 @@ func (r *rowParser) parseEntity(field *schemapb.FieldSchema, obj string) (any, e
     if nullable && obj == r.nullkey {
         return nil, nil
     }
+    if err := common.CheckValidUTF8(obj, field); err != nil {
+        return nil, err
+    }
     maxLength, err := parameterutil.GetMaxLength(field)
     if err != nil {
         return nil, err
@@ -355,7 +358,7 @@ func (r *rowParser) parseEntity(field *schemapb.FieldSchema, obj string) (any, e
         return nil, err
     }
     // elements in array not support null value
-    scalarFieldData, err := r.arrayToFieldData(vec, field.GetElementType())
+    scalarFieldData, err := r.arrayToFieldData(vec, field)
     if err != nil {
         return nil, err
     }
@@ -366,7 +369,8 @@ func (r *rowParser) parseEntity(field *schemapb.FieldSchema, obj string) (any, e
     }
 }
 
-func (r *rowParser) arrayToFieldData(arr []interface{}, eleType schemapb.DataType) (*schemapb.ScalarField, error) {
+func (r *rowParser) arrayToFieldData(arr []interface{}, field *schemapb.FieldSchema) (*schemapb.ScalarField, error) {
+    eleType := field.GetElementType()
     switch eleType {
     case schemapb.DataType_Bool:
         values := make([]bool, len(arr))
@@ -478,6 +482,16 @@ func (r *rowParser) arrayToFieldData(arr []interface{}, eleType schemapb.DataTyp
             if !ok {
                 return nil, r.wrapArrayValueTypeError(arr, eleType)
             }
+            if err := common.CheckValidUTF8(value, field); err != nil {
+                return nil, err
+            }
+            maxLength, err := parameterutil.GetMaxLength(field)
+            if err != nil {
+                return nil, err
+            }
+            if err := common.CheckVarcharLength(value, maxLength, field); err != nil {
+                return nil, err
+            }
             values[i] = value
         }
         return &schemapb.ScalarField{
@@ -249,7 +249,13 @@ func TestRowParser_Parse_Invalid(t *testing.T) {
             TypeParams: []*commonpb.KeyValuePair{{Key: common.DimKey, Value: "2"}},
         },
         {
             FieldID: 3,
+            Name: "text",
+            DataType: schemapb.DataType_VarChar,
+            TypeParams: []*commonpb.KeyValuePair{{Key: common.MaxLengthKey, Value: "16"}},
+        },
+        {
+            FieldID: 4,
             Name: "$meta",
             IsDynamic: true,
             DataType: schemapb.DataType_JSON,
@@ -264,9 +270,10 @@ func TestRowParser_Parse_Invalid(t *testing.T) {
     }
 
     cases := []testCase{
-        {header: []string{"id", "vector", "x", "$meta"}, row: []string{"1", "[1, 2]", "6", "{\"x\": 8}"}, expectErr: "duplicated key in dynamic field, key=x"},
+        {header: []string{"id", "vector", "text", "x", "$meta"}, row: []string{"1", "[1, 2]", "aaa", "6", "{\"x\": 8}"}, expectErr: "duplicated key in dynamic field, key=x"},
-        {header: []string{"id", "vector", "x", "$meta"}, row: []string{"1", "[1, 2]", "8", "{*&%%&$*(&}"}, expectErr: "illegal value for dynamic field, not a JSON format string"},
+        {header: []string{"id", "vector", "text", "x", "$meta"}, row: []string{"1", "[1, 2]", "aaa", "8", "{*&%%&$*(&}"}, expectErr: "illegal value for dynamic field, not a JSON format string"},
-        {header: []string{"id", "vector", "x", "$meta"}, row: []string{"1", "[1, 2]", "8"}, expectErr: "the number of fields in the row is not equal to the header"},
+        {header: []string{"id", "vector", "text", "x", "$meta"}, row: []string{"1", "[1, 2]", "aaa", "8"}, expectErr: "the number of fields in the row is not equal to the header"},
+        {header: []string{"id", "vector", "text", "x", "$meta"}, row: []string{"1", "[1, 2]", "\xc3\x28", "6", "{\"y\": 8}"}, expectErr: "contains invalid UTF-8 data"},
     }
 
     nullkey := ""
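A note on the probe value used in the new negative test cases: in "\xc3\x28", the byte 0xC3 opens a two-byte UTF-8 sequence that must be followed by a continuation byte in 0x80-0xBF, and 0x28 ('(') is not one, so the string is invalid. A quick standard-library check, for illustration only:

    package main

    import (
        "fmt"
        "unicode/utf8"
    )

    func main() {
        fmt.Println(utf8.ValidString("\xc3\x28")) // false: 0x28 is not a continuation byte
        fmt.Println(utf8.ValidString("\xc3\xa9")) // true: 0xC3 0xA9 encodes 'é'
    }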
@@ -530,6 +530,13 @@ func (r *rowParser) parseNullableEntity(fieldID int64, obj any) (any, error) {
         if !ok {
             return nil, r.wrapTypeError(obj, fieldID)
         }
+        maxLength, err := parameterutil.GetMaxLength(r.id2Field[fieldID])
+        if err != nil {
+            return nil, err
+        }
+        if err = common.CheckVarcharLength(value, maxLength, r.id2Field[fieldID]); err != nil {
+            return nil, err
+        }
         return value, nil
     case schemapb.DataType_JSON:
         if obj == nil {
@@ -219,4 +219,19 @@ func TestRowParser_Parse_Invalid(t *testing.T) {
             assert.True(t, strings.Contains(err.Error(), c.expectErr))
         })
     }
+
+    cases = []testCase{
+        {name: `{"id": 1, "vector": [], "arrayField": [1, 2, 3], "name": "\xc3\x28"}`, expectErr: "Syntax error"}, // test invalid uft-8
+    }
+    for _, c := range cases {
+        t.Run(c.name, func(t *testing.T) {
+            var mp map[string]interface{}
+
+            desc := json.NewDecoder(strings.NewReader(c.name))
+            desc.UseNumber()
+            err = desc.Decode(&mp)
+            assert.Error(t, err)
+            assert.True(t, strings.Contains(err.Error(), c.expectErr))
+        })
+    }
 }
@@ -310,7 +310,11 @@ func (c *FieldReader) ReadString(count int64) ([]string, error) {
             if n > 0 {
                 buf = buf[:n]
             }
-            data = append(data, string(buf))
+            str := string(buf)
+            if err = common.CheckValidUTF8(str, c.field); err != nil {
+                return nil, err
+            }
+            data = append(data, str)
         }
     }
     return data, nil
internal/util/importutilv2/numpy/field_reader_test.go (new file, 117 lines)
@@ -0,0 +1,117 @@
+// Licensed to the LF AI & Data foundation under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package numpy
+
+import (
+    "bytes"
+    "encoding/binary"
+    "io"
+    "strings"
+    "testing"
+
+    "github.com/stretchr/testify/assert"
+    "golang.org/x/text/encoding/simplifiedchinese"
+    "golang.org/x/text/transform"
+
+    "github.com/milvus-io/milvus-proto/go-api/v2/commonpb"
+    "github.com/milvus-io/milvus-proto/go-api/v2/schemapb"
+)
+
+func encodeToGB2312(input string) ([]byte, error) {
+    encoder := simplifiedchinese.GB18030.NewEncoder() // GB18030 is compatible with GB2312.
+    var buf bytes.Buffer
+    writer := transform.NewWriter(&buf, encoder)
+
+    _, err := writer.Write([]byte(input))
+    if err != nil {
+        return nil, err
+    }
+    return buf.Bytes(), nil
+}
+
+func WriteNonUTF8Npy() io.Reader {
+    // Use bytes.Buffer instead of writing to a file
+    var buffer bytes.Buffer
+
+    // Step 1: Write Magic Number and version
+    buffer.Write([]byte{0x93, 'N', 'U', 'M', 'P', 'Y'})
+    buffer.Write([]byte{1, 0}) // Numpy 1.0 version
+
+    // Step 2: Construct the header (using '|S20' for 20-byte fixed-length strings)
+    header := "{'descr': '|S20', 'fortran_order': False, 'shape': (6,), }"
+    headerBytes := []byte(header)
+
+    // Pad the header to align its length to a multiple of 64 bytes
+    padding := 64 - (10+len(headerBytes)+1)%64
+    headerBytes = append(headerBytes, make([]byte, padding)...)
+    headerBytes = append(headerBytes, '\n')
+
+    // Write the header length and the header itself
+    binary.Write(&buffer, binary.LittleEndian, uint16(len(headerBytes)))
+    buffer.Write(headerBytes)
+
+    // Step 3: Write non-UTF-8 string data (e.g., Latin1 encoded)
+    testStrings := []string{
+        "hello, vector database",               // valid utf-8
+        string([]byte{0xC3, 0xA9, 0xB5, 0xF1}), // Latin1 encoded "éµñ"
+        string([]byte{0xB0, 0xE1, 0xF2, 0xBD}), // Random non-UTF-8 data
+        string([]byte{0xD2, 0xA9, 0xC8, 0xFC}), // Another set of non-UTF-8 data
+    }
+
+    for _, str := range testStrings {
+        data := make([]byte, 20)
+        copy(data, str)
+        buffer.Write(data)
+    }
+
+    // Step 4: Encode and write GB-2312 Chinese strings
+    chineseStrings := []string{
+        "你好,世界", // "Hello, World"
+        "向量数据库", // "Data Storage"
+    }
+
+    for _, str := range chineseStrings {
+        gb2312Data, err := encodeToGB2312(str)
+        if err != nil {
+            panic(err)
+        }
+        data := make([]byte, 20)
+        copy(data, gb2312Data)
+        buffer.Write(data)
+    }
+
+    // Step 5: Convert buffer to a reader to simulate file reading
+    reader := bytes.NewReader(buffer.Bytes())
+    return reader
+}
+
+func TestInvalidUTF8(t *testing.T) {
+    fieldSchema := &schemapb.FieldSchema{
+        FieldID:    100,
+        Name:       "str",
+        DataType:   schemapb.DataType_VarChar,
+        TypeParams: []*commonpb.KeyValuePair{{Key: "max_length", Value: "256"}},
+    }
+
+    reader := WriteNonUTF8Npy()
+    fr, err := NewFieldReader(reader, fieldSchema)
+    assert.NoError(t, err)
+
+    _, err = fr.Next(int64(6))
+    assert.Error(t, err)
+    assert.True(t, strings.Contains(err.Error(), "contains invalid UTF-8 data"))
+}
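The WriteNonUTF8Npy helper above hand-rolls a NumPy v1.0 .npy preamble: 6 bytes of magic, 2 bytes of version, a 2-byte little-endian header length, then the header dict padded and terminated with '\n' so that the whole preamble is a multiple of 64 bytes (the 10 in the padding formula is 6 + 2 + 2). A small arithmetic check of that alignment, assuming the same header string, for illustration only:

    package main

    import "fmt"

    func main() {
        header := "{'descr': '|S20', 'fortran_order': False, 'shape': (6,), }"
        // 6 bytes of magic + 2 bytes of version + 2 bytes of header length = 10 bytes before the header.
        padding := 64 - (10+len(header)+1)%64 // +1 for the trailing '\n'
        total := 10 + len(header) + padding + 1
        fmt.Println(total, total%64) // 128 0: the preamble is 64-byte aligned
    }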
@@ -576,10 +576,14 @@ func ReadVarcharData(pcr *FieldReader, count int64) (any, error) {
             return nil, WrapTypeErr("string", chunk.DataType().Name(), pcr.field)
         }
         for i := 0; i < dataNums; i++ {
-            if err = common.CheckVarcharLength(stringReader.Value(i), maxLength, pcr.field); err != nil {
+            value := stringReader.Value(i)
+            if err = common.CheckValidUTF8(value, pcr.field); err != nil {
                 return nil, err
             }
-            data = append(data, stringReader.Value(i))
+            if err = common.CheckVarcharLength(value, maxLength, pcr.field); err != nil {
+                return nil, err
+            }
+            data = append(data, value)
         }
     }
     if len(data) == 0 {
@@ -617,10 +621,14 @@ func ReadNullableVarcharData(pcr *FieldReader, count int64) (any, []bool, error)
                 data = append(data, "")
                 continue
             }
-            if err = common.CheckVarcharLength(stringReader.Value(i), maxLength, pcr.field); err != nil {
+            value := stringReader.ValueStr(i)
+            if err = common.CheckValidUTF8(value, pcr.field); err != nil {
                 return nil, nil, err
             }
-            data = append(data, stringReader.ValueStr(i))
+            if err = common.CheckVarcharLength(value, maxLength, pcr.field); err != nil {
+                return nil, nil, err
+            }
+            data = append(data, value)
         }
     }
 }
@@ -1193,6 +1201,10 @@ func ReadNullableIntegerOrFloatArrayData[T constraints.Integer | constraints.Flo
 }
 
 func ReadStringArrayData(pcr *FieldReader, count int64) (any, error) {
+    maxLength, err := parameterutil.GetMaxLength(pcr.field)
+    if err != nil {
+        return nil, err
+    }
     chunked, err := pcr.columnReader.NextBatch(count)
     if err != nil {
         return nil, err
@@ -1212,7 +1224,14 @@ func ReadStringArrayData(pcr *FieldReader, count int64) (any, error) {
         start, end := offsets[i-1], offsets[i]
         elementData := make([]string, 0, end-start)
         for j := start; j < end; j++ {
-            elementData = append(elementData, stringReader.Value(int(j)))
+            value := stringReader.Value(int(j))
+            if err = common.CheckValidUTF8(value, pcr.field); err != nil {
+                return nil, err
+            }
+            if err = common.CheckVarcharLength(value, maxLength, pcr.field); err != nil {
+                return nil, err
+            }
+            elementData = append(elementData, value)
         }
         data = append(data, elementData)
     }
@@ -1,16 +1,86 @@
 package parquet
 
 import (
+    "context"
+    "fmt"
+    "math/rand"
+    "os"
+    "strings"
     "testing"
 
     "github.com/apache/arrow/go/v12/arrow"
     "github.com/apache/arrow/go/v12/arrow/array"
     "github.com/apache/arrow/go/v12/arrow/memory"
+    "github.com/apache/arrow/go/v12/parquet"
+    "github.com/apache/arrow/go/v12/parquet/pqarrow"
     "github.com/stretchr/testify/assert"
 
+    "github.com/milvus-io/milvus-proto/go-api/v2/commonpb"
+    "github.com/milvus-io/milvus-proto/go-api/v2/schemapb"
+    "github.com/milvus-io/milvus/internal/storage"
+    "github.com/milvus-io/milvus/internal/util/testutil"
     "github.com/milvus-io/milvus/pkg/v2/util/typeutil"
 )
 
+func TestInvalidUTF8(t *testing.T) {
+    const (
+        fieldID = int64(100)
+        numRows = 100
+    )
+
+    schema := &schemapb.CollectionSchema{
+        Fields: []*schemapb.FieldSchema{
+            {
+                FieldID:    fieldID,
+                Name:       "str",
+                DataType:   schemapb.DataType_VarChar,
+                TypeParams: []*commonpb.KeyValuePair{{Key: "max_length", Value: "256"}},
+            },
+        },
+    }
+
+    data := make([]string, numRows)
+    for i := 0; i < numRows-1; i++ {
+        data[i] = randomString(16)
+    }
+    data[numRows-1] = "\xc3\x28" // invalid utf-8
+
+    filePath := fmt.Sprintf("test_%d_reader.parquet", rand.Int())
+    defer os.Remove(filePath)
+    wf, err := os.OpenFile(filePath, os.O_RDWR|os.O_CREATE, 0o666)
+    assert.NoError(t, err)
+
+    pqSchema, err := ConvertToArrowSchema(schema, false)
+    assert.NoError(t, err)
+    fw, err := pqarrow.NewFileWriter(pqSchema, wf,
+        parquet.NewWriterProperties(parquet.WithMaxRowGroupLength(numRows)), pqarrow.DefaultWriterProps())
+    assert.NoError(t, err)
+
+    insertData, err := storage.NewInsertData(schema)
+    assert.NoError(t, err)
+    err = insertData.Data[fieldID].AppendDataRows(data)
+    assert.NoError(t, err)
+
+    columns, err := testutil.BuildArrayData(schema, insertData, false)
+    assert.NoError(t, err)
+
+    recordBatch := array.NewRecord(pqSchema, columns, numRows)
+    err = fw.Write(recordBatch)
+    assert.NoError(t, err)
+    fw.Close()
+
+    ctx := context.Background()
+    f := storage.NewChunkManagerFactory("local", storage.RootPath("/tmp/milvus_test/test_parquet_reader/"))
+    cm, err := f.NewPersistentStorageChunkManager(ctx)
+    assert.NoError(t, err)
+    reader, err := NewReader(ctx, cm, schema, filePath, 64*1024*1024)
+    assert.NoError(t, err)
+
+    _, err = reader.Read()
+    assert.Error(t, err)
+    assert.True(t, strings.Contains(err.Error(), "contains invalid UTF-8 data"))
+}
+
 // TestParseSparseFloatRowVector tests the parseSparseFloatRowVector function
 func TestParseSparseFloatRowVector(t *testing.T) {
     tests := []struct {
@@ -40,13 +40,6 @@ func WrapTypeErr(expect string, actual string, field *schemapb.FieldSchema) erro
         expect, field.GetName(), actual))
 }
 
-func calcBufferSize(blockSize int, schema *schemapb.CollectionSchema) int {
-    if len(schema.GetFields()) <= 0 {
-        return blockSize
-    }
-    return blockSize / len(schema.GetFields())
-}
-
 func CreateFieldReaders(ctx context.Context, fileReader *pqarrow.FileReader, schema *schemapb.CollectionSchema) (map[int64]*FieldReader, error) {
     nameToField := lo.KeyBy(schema.GetFields(), func(field *schemapb.FieldSchema) string {
         return field.GetName()
@@ -18,6 +18,7 @@ package typeutil
 
 import (
     "strings"
+    "unicode/utf8"
     "unsafe"
 )
 
@@ -65,3 +66,11 @@ func UnsafeStr2bytes(s string) []byte {
 func UnsafeBytes2str(b []byte) string {
     return *(*string)(unsafe.Pointer(&b))
 }
+
+func IsUTF8(s string) bool {
+    return utf8.ValidString(s)
+}
+
+func IsUTF8Bytes(b []byte) bool {
+    return utf8.Valid(b)
+}
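The new typeutil helpers are thin wrappers over the standard library, so their behavior is exactly that of unicode/utf8: well-formed multi-byte text passes, while stray continuation bytes or other encodings (such as the GB-2312 bytes written by the numpy test above) fail. A short sketch, for illustration only, using byte sequences taken from that test:

    package main

    import (
        "fmt"
        "unicode/utf8"
    )

    func main() {
        // typeutil.IsUTF8 / IsUTF8Bytes simply delegate to these calls.
        fmt.Println(utf8.ValidString("你好,世界"))                // true: well-formed multi-byte UTF-8
        fmt.Println(utf8.Valid([]byte{0xC3, 0xA9, 0xB5, 0xF1})) // false: 0xB5 is a stray continuation byte
        fmt.Println(utf8.Valid([]byte{0xB0, 0xE1, 0xF2, 0xBD})) // false: 0xB0 cannot start a sequence
    }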