mirror of
https://gitee.com/milvus-io/milvus.git
synced 2025-12-06 17:18:35 +08:00
feat: Geospatial Data Type and GIS Function support for milvus (#44547)
issue: #43427 This pr's main goal is merge #37417 to milvus 2.5 without conflicts. # Main Goals 1. Create and describe collections with geospatial type 2. Insert geospatial data into the insert binlog 3. Load segments containing geospatial data into memory 4. Enable query and search can display geospatial data 5. Support using GIS funtions like ST_EQUALS in query 6. Support R-Tree index for geometry type # Solution 1. **Add Type**: Modify the Milvus core by adding a Geospatial type in both the C++ and Go code layers, defining the Geospatial data structure and the corresponding interfaces. 2. **Dependency Libraries**: Introduce necessary geospatial data processing libraries. In the C++ source code, use Conan package management to include the GDAL library. In the Go source code, add the go-geom library to the go.mod file. 3. **Protocol Interface**: Revise the Milvus protocol to provide mechanisms for Geospatial message serialization and deserialization. 4. **Data Pipeline**: Facilitate interaction between the client and proxy using the WKT format for geospatial data. The proxy will convert all data into WKB format for downstream processing, providing column data interfaces, segment encapsulation, segment loading, payload writing, and cache block management. 5. **Query Operators**: Implement simple display and support for filter queries. Initially, focus on filtering based on spatial relationships for a single column of geospatial literal values, providing parsing and execution for query expressions.Now only support brutal search 7. **Client Modification**: Enable the client to handle user input for geospatial data and facilitate end-to-end testing.Check the modification in pymilvus. --------- Signed-off-by: Yinwei Li <yinwei.li@zilliz.com> Signed-off-by: Cai Zhang <cai.zhang@zilliz.com> Co-authored-by: ZhuXi <150327960+Yinwei-Yu@users.noreply.github.com>
This commit is contained in:
parent
2bd377af24
commit
19346fa389
@ -212,6 +212,9 @@ func FieldDataColumn(fd *schemapb.FieldData, begin, end int) (Column, error) {
|
||||
case schemapb.DataType_JSON:
|
||||
return parseScalarData(fd.GetFieldName(), fd.GetScalars().GetJsonData().GetData(), begin, end, validData, NewColumnJSONBytes, NewNullableColumnJSONBytes)
|
||||
|
||||
case schemapb.DataType_Geometry:
|
||||
return parseScalarData(fd.GetFieldName(), fd.GetScalars().GetGeometryWktData().GetData(), begin, end, validData, NewColumnGeometryWKT, NewNullableColumnGeometryWKT)
|
||||
|
||||
case schemapb.DataType_FloatVector:
|
||||
vectors := fd.GetVectors()
|
||||
x, ok := vectors.GetData().(*schemapb.VectorField_FloatVector)
|
||||
|
||||
@ -117,7 +117,8 @@ func values2FieldData[T any](values []T, fieldType entity.FieldType, dim int) *s
|
||||
entity.FieldTypeInt64,
|
||||
entity.FieldTypeVarChar,
|
||||
entity.FieldTypeString,
|
||||
entity.FieldTypeJSON:
|
||||
entity.FieldTypeJSON,
|
||||
entity.FieldTypeGeometry:
|
||||
fd.Field = &schemapb.FieldData_Scalars{
|
||||
Scalars: values2Scalars(values, fieldType), // scalars,
|
||||
}
|
||||
@ -199,6 +200,12 @@ func values2Scalars[T any](values []T, fieldType entity.FieldType) *schemapb.Sca
|
||||
Data: data,
|
||||
},
|
||||
}
|
||||
case entity.FieldTypeGeometry:
|
||||
var strVals []string
|
||||
strVals, ok = any(values).([]string)
|
||||
scalars.Data = &schemapb.ScalarField_GeometryWktData{
|
||||
GeometryWktData: &schemapb.GeometryWktArray{Data: strVals},
|
||||
}
|
||||
}
|
||||
// shall not be accessed
|
||||
if !ok {
|
||||
|
||||
91
client/column/geometry.go
Normal file
91
client/column/geometry.go
Normal file
@ -0,0 +1,91 @@
|
||||
package column
|
||||
|
||||
import (
|
||||
"github.com/cockroachdb/errors"
|
||||
|
||||
"github.com/milvus-io/milvus-proto/go-api/v2/schemapb"
|
||||
"github.com/milvus-io/milvus/client/v2/entity"
|
||||
)
|
||||
|
||||
type ColumnGeometryWKT struct {
|
||||
*genericColumnBase[string]
|
||||
}
|
||||
|
||||
// Name returns column name.
|
||||
func (c *ColumnGeometryWKT) Name() string {
|
||||
return c.name
|
||||
}
|
||||
|
||||
// Type returns column entity.FieldType.
|
||||
func (c *ColumnGeometryWKT) Type() entity.FieldType {
|
||||
return entity.FieldTypeGeometry
|
||||
}
|
||||
|
||||
// Len returns column values length.
|
||||
func (c *ColumnGeometryWKT) Len() int {
|
||||
return len(c.values)
|
||||
}
|
||||
|
||||
func (c *ColumnGeometryWKT) Slice(start, end int) Column {
|
||||
l := c.Len()
|
||||
if start > l {
|
||||
start = l
|
||||
}
|
||||
if end == -1 || end > l {
|
||||
end = l
|
||||
}
|
||||
return &ColumnGeometryWKT{
|
||||
genericColumnBase: c.genericColumnBase.slice(start, end),
|
||||
}
|
||||
}
|
||||
|
||||
// Get returns value at index as interface{}.
|
||||
func (c *ColumnGeometryWKT) Get(idx int) (interface{}, error) {
|
||||
if idx < 0 || idx >= c.Len() {
|
||||
return nil, errors.New("index out of range")
|
||||
}
|
||||
return c.values[idx], nil
|
||||
}
|
||||
|
||||
func (c *ColumnGeometryWKT) GetAsString(idx int) (string, error) {
|
||||
return c.ValueByIdx(idx)
|
||||
}
|
||||
|
||||
// FieldData return column data mapped to schemapb.FieldData.
|
||||
func (c *ColumnGeometryWKT) FieldData() *schemapb.FieldData {
|
||||
fd := c.genericColumnBase.FieldData()
|
||||
return fd
|
||||
}
|
||||
|
||||
// ValueByIdx returns value of the provided index.
|
||||
func (c *ColumnGeometryWKT) ValueByIdx(idx int) (string, error) {
|
||||
if idx < 0 || idx >= c.Len() {
|
||||
return "", errors.New("index out of range")
|
||||
}
|
||||
return c.values[idx], nil
|
||||
}
|
||||
|
||||
// AppendValue append value into column.
|
||||
func (c *ColumnGeometryWKT) AppendValue(i interface{}) error {
|
||||
s, ok := i.(string)
|
||||
if !ok {
|
||||
return errors.New("expect geometry WKT type(string)")
|
||||
}
|
||||
c.values = append(c.values, s)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Data returns column data.
|
||||
func (c *ColumnGeometryWKT) Data() []string {
|
||||
return c.values
|
||||
}
|
||||
|
||||
func NewColumnGeometryWKT(name string, values []string) *ColumnGeometryWKT {
|
||||
return &ColumnGeometryWKT{
|
||||
genericColumnBase: &genericColumnBase[string]{
|
||||
name: name,
|
||||
fieldType: entity.FieldTypeGeometry,
|
||||
values: values,
|
||||
},
|
||||
}
|
||||
}
|
||||
76
client/column/geometry_test.go
Normal file
76
client/column/geometry_test.go
Normal file
@ -0,0 +1,76 @@
|
||||
package column
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math/rand"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/milvus-io/milvus/client/v2/entity"
|
||||
)
|
||||
|
||||
type ColumnGeometryWKTSuite struct {
|
||||
suite.Suite
|
||||
}
|
||||
|
||||
func (s *ColumnGeometryWKTSuite) SetupSuite() {
|
||||
rand.Seed(time.Now().UnixNano())
|
||||
}
|
||||
|
||||
func (s *ColumnGeometryWKTSuite) TestAttrMethods() {
|
||||
columnName := fmt.Sprintf("column_Geometrywkt_%d", rand.Int())
|
||||
columnLen := 8 + rand.Intn(10)
|
||||
|
||||
v := make([]string, columnLen)
|
||||
column := NewColumnGeometryWKT(columnName, v)
|
||||
|
||||
s.Run("test_meta", func() {
|
||||
ft := entity.FieldTypeGeometry
|
||||
s.Equal("Geometry", ft.Name())
|
||||
s.Equal("Geometry", ft.String())
|
||||
pbName, pbType := ft.PbFieldType()
|
||||
s.Equal("Geometry", pbName)
|
||||
s.Equal("Geometry", pbType)
|
||||
})
|
||||
|
||||
s.Run("test_column_attribute", func() {
|
||||
s.Equal(columnName, column.Name())
|
||||
s.Equal(entity.FieldTypeGeometry, column.Type())
|
||||
s.Equal(columnLen, column.Len())
|
||||
s.EqualValues(v, column.Data())
|
||||
})
|
||||
|
||||
s.Run("test_column_field_data", func() {
|
||||
fd := column.FieldData()
|
||||
s.NotNil(fd)
|
||||
s.Equal(fd.GetFieldName(), columnName)
|
||||
})
|
||||
|
||||
s.Run("test_column_valuer_by_idx", func() {
|
||||
_, err := column.ValueByIdx(-1)
|
||||
s.Error(err)
|
||||
_, err = column.ValueByIdx(columnLen)
|
||||
s.Error(err)
|
||||
for i := 0; i < columnLen; i++ {
|
||||
v, err := column.ValueByIdx(i)
|
||||
s.NoError(err)
|
||||
s.Equal(column.values[i], v)
|
||||
}
|
||||
})
|
||||
|
||||
s.Run("test_append_value", func() {
|
||||
item := "POINT (30.123 -10.456)"
|
||||
err := column.AppendValue(item)
|
||||
s.NoError(err)
|
||||
s.Equal(columnLen+1, column.Len())
|
||||
val, err := column.ValueByIdx(columnLen)
|
||||
s.NoError(err)
|
||||
s.Equal(item, val)
|
||||
})
|
||||
}
|
||||
|
||||
func TestColumnGeometryWKT(t *testing.T) {
|
||||
suite.Run(t, new(ColumnGeometryWKTSuite))
|
||||
}
|
||||
@ -29,6 +29,7 @@ var (
|
||||
NewNullableColumnDouble NullableColumnCreateFunc[float64, *ColumnDouble] = NewNullableColumnCreator(NewColumnDouble).New
|
||||
NewNullableColumnTimestamptz NullableColumnCreateFunc[int64, *ColumnTimestamptz] = NewNullableColumnCreator(NewColumnTimestamptz).New
|
||||
NewNullableColumnJSONBytes NullableColumnCreateFunc[[]byte, *ColumnJSONBytes] = NewNullableColumnCreator(NewColumnJSONBytes).New
|
||||
NewNullableColumnGeometryWKT NullableColumnCreateFunc[string, *ColumnGeometryWKT] = NewNullableColumnCreator(NewColumnGeometryWKT).New
|
||||
// array
|
||||
NewNullableColumnBoolArray NullableColumnCreateFunc[[]bool, *ColumnBoolArray] = NewNullableColumnCreator(NewColumnBoolArray).New
|
||||
NewNullableColumnInt8Array NullableColumnCreateFunc[[]int8, *ColumnInt8Array] = NewNullableColumnCreator(NewColumnInt8Array).New
|
||||
|
||||
@ -56,6 +56,8 @@ func (t FieldType) Name() string {
|
||||
return "Array"
|
||||
case FieldTypeJSON:
|
||||
return "JSON"
|
||||
case FieldTypeGeometry:
|
||||
return "Geometry"
|
||||
case FieldTypeBinaryVector:
|
||||
return "BinaryVector"
|
||||
case FieldTypeFloatVector:
|
||||
@ -98,6 +100,8 @@ func (t FieldType) String() string {
|
||||
return "Array"
|
||||
case FieldTypeJSON:
|
||||
return "JSON"
|
||||
case FieldTypeGeometry:
|
||||
return "Geometry"
|
||||
case FieldTypeBinaryVector:
|
||||
return "[]byte"
|
||||
case FieldTypeFloatVector:
|
||||
@ -138,6 +142,8 @@ func (t FieldType) PbFieldType() (string, string) {
|
||||
return "VarChar", "string"
|
||||
case FieldTypeJSON:
|
||||
return "JSON", "JSON"
|
||||
case FieldTypeGeometry:
|
||||
return "Geometry", "Geometry"
|
||||
case FieldTypeBinaryVector:
|
||||
return "[]byte", ""
|
||||
case FieldTypeFloatVector:
|
||||
@ -181,6 +187,8 @@ const (
|
||||
FieldTypeArray FieldType = 22
|
||||
// FieldTypeJSON field type JSON
|
||||
FieldTypeJSON FieldType = 23
|
||||
// FieldTypeGeometry field type Geometry
|
||||
FieldTypeGeometry FieldType = 24
|
||||
// FieldTypeBinaryVector field type binary vector
|
||||
FieldTypeBinaryVector FieldType = 100
|
||||
// FieldTypeFloatVector field type float vector
|
||||
|
||||
@ -67,4 +67,5 @@ const (
|
||||
Sorted IndexType = "STL_SORT"
|
||||
Inverted IndexType = "INVERTED"
|
||||
BITMAP IndexType = "BITMAP"
|
||||
RTREE IndexType = "RTREE"
|
||||
)
|
||||
|
||||
70
client/index/rtree.go
Normal file
70
client/index/rtree.go
Normal file
@ -0,0 +1,70 @@
|
||||
// Licensed to the LF AI & Data foundation under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package index
|
||||
|
||||
var _ Index = rtreeIndex{}
|
||||
|
||||
// rtreeIndex represents an RTree index for geometry fields
|
||||
type rtreeIndex struct {
|
||||
baseIndex
|
||||
}
|
||||
|
||||
func (idx rtreeIndex) Params() map[string]string {
|
||||
params := map[string]string{
|
||||
IndexTypeKey: string(RTREE),
|
||||
}
|
||||
return params
|
||||
}
|
||||
|
||||
// NewRTreeIndex creates a new RTree index with default parameters
|
||||
func NewRTreeIndex() Index {
|
||||
return rtreeIndex{
|
||||
baseIndex: baseIndex{
|
||||
indexType: RTREE,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// NewRTreeIndexWithParams creates a new RTree index with custom parameters
|
||||
func NewRTreeIndexWithParams() Index {
|
||||
return rtreeIndex{
|
||||
baseIndex: baseIndex{
|
||||
indexType: RTREE,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// RTreeIndexBuilder provides a fluent API for building RTree indexes
|
||||
type RTreeIndexBuilder struct {
|
||||
index rtreeIndex
|
||||
}
|
||||
|
||||
// NewRTreeIndexBuilder creates a new RTree index builder
|
||||
func NewRTreeIndexBuilder() *RTreeIndexBuilder {
|
||||
return &RTreeIndexBuilder{
|
||||
index: rtreeIndex{
|
||||
baseIndex: baseIndex{
|
||||
indexType: RTREE,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Build returns the constructed RTree index
|
||||
func (b *RTreeIndexBuilder) Build() Index {
|
||||
return b.index
|
||||
}
|
||||
77
client/index/rtree_test.go
Normal file
77
client/index/rtree_test.go
Normal file
@ -0,0 +1,77 @@
|
||||
// Licensed to the LF AI & Data foundation under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package index
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/suite"
|
||||
)
|
||||
|
||||
type RTreeIndexSuite struct {
|
||||
suite.Suite
|
||||
}
|
||||
|
||||
func (s *RTreeIndexSuite) TestNewRTreeIndex() {
|
||||
idx := NewRTreeIndex()
|
||||
s.Equal(RTREE, idx.IndexType())
|
||||
|
||||
params := idx.Params()
|
||||
s.Equal(string(RTREE), params[IndexTypeKey])
|
||||
}
|
||||
|
||||
func (s *RTreeIndexSuite) TestNewRTreeIndexWithParams() {
|
||||
idx := NewRTreeIndexWithParams()
|
||||
s.Equal(RTREE, idx.IndexType())
|
||||
|
||||
params := idx.Params()
|
||||
s.Equal(string(RTREE), params[IndexTypeKey])
|
||||
}
|
||||
|
||||
func (s *RTreeIndexSuite) TestRTreeIndexBuilder() {
|
||||
idx := NewRTreeIndexBuilder().
|
||||
Build()
|
||||
|
||||
s.Equal(RTREE, idx.IndexType())
|
||||
|
||||
params := idx.Params()
|
||||
s.Equal(string(RTREE), params[IndexTypeKey])
|
||||
}
|
||||
|
||||
func (s *RTreeIndexSuite) TestRTreeIndexBuilderDefaults() {
|
||||
idx := NewRTreeIndexBuilder().Build()
|
||||
s.Equal(RTREE, idx.IndexType())
|
||||
|
||||
params := idx.Params()
|
||||
s.Equal(string(RTREE), params[IndexTypeKey])
|
||||
}
|
||||
|
||||
func (s *RTreeIndexSuite) TestRTreeIndexBuilderChaining() {
|
||||
builder := NewRTreeIndexBuilder()
|
||||
|
||||
// Test method chaining
|
||||
result := builder.Build()
|
||||
|
||||
s.Equal(RTREE, result.IndexType())
|
||||
|
||||
params := result.Params()
|
||||
s.Equal(string(RTREE), params[IndexTypeKey])
|
||||
}
|
||||
|
||||
func TestRTreeIndex(t *testing.T) {
|
||||
suite.Run(t, new(RTreeIndexSuite))
|
||||
}
|
||||
@ -212,6 +212,22 @@ func (s *MockSuiteBase) getJSONBytesFieldData(name string, data [][]byte, isDyna
|
||||
}
|
||||
}
|
||||
|
||||
func (s *MockSuiteBase) getGeometryWktFieldData(name string, data []string) *schemapb.FieldData {
|
||||
return &schemapb.FieldData{
|
||||
Type: schemapb.DataType_Geometry,
|
||||
FieldName: name,
|
||||
Field: &schemapb.FieldData_Scalars{
|
||||
Scalars: &schemapb.ScalarField{
|
||||
Data: &schemapb.ScalarField_GeometryWktData{
|
||||
GeometryWktData: &schemapb.GeometryWktArray{
|
||||
Data: data,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (s *MockSuiteBase) getFloatVectorFieldData(name string, dim int64, data []float32) *schemapb.FieldData {
|
||||
return &schemapb.FieldData{
|
||||
Type: schemapb.DataType_FloatVector,
|
||||
|
||||
6
go.mod
6
go.mod
@ -79,6 +79,7 @@ require (
|
||||
github.com/remeh/sizedwaitgroup v1.0.0
|
||||
github.com/shirou/gopsutil/v4 v4.24.10
|
||||
github.com/tidwall/gjson v1.17.1
|
||||
github.com/twpayne/go-geom v1.6.1
|
||||
github.com/valyala/fastjson v1.6.4
|
||||
github.com/zeebo/xxh3 v1.0.2
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.59.0
|
||||
@ -86,7 +87,6 @@ require (
|
||||
google.golang.org/protobuf v1.36.5
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
mosn.io/holmes v1.0.2
|
||||
mosn.io/pkg v0.0.0-20211217101631-d914102d1baf
|
||||
)
|
||||
|
||||
require (
|
||||
@ -152,8 +152,6 @@ require (
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
||||
github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2 // indirect
|
||||
github.com/docker/go-units v0.5.0 // indirect
|
||||
github.com/dubbogo/getty v1.3.4 // indirect
|
||||
github.com/dubbogo/gost v1.11.16 // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/dvsekhvalnov/jose2go v1.6.0 // indirect
|
||||
github.com/ebitengine/purego v0.8.1 // indirect
|
||||
@ -197,7 +195,6 @@ require (
|
||||
github.com/ianlancetaylor/cgosymbolizer v0.0.0-20221217025313-27d3c9f66b6a // indirect
|
||||
github.com/jonboulle/clockwork v0.2.2 // indirect
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/k0kubun/pp v3.0.1+incompatible // indirect
|
||||
github.com/klauspost/asmfmt v1.3.2 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.8 // indirect
|
||||
github.com/kr/pretty v0.3.1 // indirect
|
||||
@ -302,6 +299,7 @@ require (
|
||||
k8s.io/klog/v2 v2.130.1 // indirect
|
||||
k8s.io/utils v0.0.0-20250321185631-1f6e0b77f77e // indirect
|
||||
mosn.io/api v0.0.0-20210204052134-5b9a826795fd // indirect
|
||||
mosn.io/pkg v0.0.0-20211217101631-d914102d1baf // indirect
|
||||
sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3 // indirect
|
||||
sigs.k8s.io/structured-merge-diff/v4 v4.4.2 // indirect
|
||||
sigs.k8s.io/yaml v1.4.0 // indirect
|
||||
|
||||
17
go.sum
17
go.sum
@ -90,6 +90,8 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03
|
||||
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
||||
github.com/CloudyKit/fastprinter v0.0.0-20200109182630-33d98a066a53/go.mod h1:+3IMCy2vIlbG1XG/0ggNQv0SvxCAIpPM5b1nCz56Xno=
|
||||
github.com/CloudyKit/jet/v3 v3.0.0/go.mod h1:HKQPgSJmdK8hdoAbKUUWajkHyHo4RaU5rMdUywE7VMo=
|
||||
github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7OputlJIzU=
|
||||
github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU=
|
||||
github.com/DataDog/zstd v1.5.0 h1:+K/VEwIAaPcHiMtQvpLD4lqW7f0Gk3xdYZmI1hD+CXo=
|
||||
github.com/DataDog/zstd v1.5.0/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw=
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.25.0 h1:3c8yed4lgqTt+oTQ+JNMDo+F4xprBf+O/il4ZC0nRLw=
|
||||
@ -118,6 +120,10 @@ github.com/actgardner/gogen-avro/v10 v10.2.1/go.mod h1:QUhjeHPchheYmMDni/Nx7VB0R
|
||||
github.com/actgardner/gogen-avro/v9 v9.1.0/go.mod h1:nyTj6wPqDJoxM3qdnjcLv+EnMDSDFqE0qDpva2QRmKc=
|
||||
github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY=
|
||||
github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw=
|
||||
github.com/alecthomas/assert/v2 v2.10.0 h1:jjRCHsj6hBJhkmhznrCzoNpbA3zqy0fYiUcYZP/GkPY=
|
||||
github.com/alecthomas/assert/v2 v2.10.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
|
||||
github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc=
|
||||
github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
|
||||
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
|
||||
@ -303,11 +309,9 @@ github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj
|
||||
github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc=
|
||||
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
|
||||
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
|
||||
github.com/dubbogo/getty v1.3.4 h1:5TvH213pnSIKYzY7IK8TT/r6yr5uPTB/U6YNLT+GsU0=
|
||||
github.com/dubbogo/getty v1.3.4/go.mod h1:36f+gH/ekaqcDWKbxNBQk9b9HXcGtaI6YHxp4YTntX8=
|
||||
github.com/dubbogo/go-zookeeper v1.0.3/go.mod h1:fn6n2CAEer3novYgk9ULLwAjuV8/g4DdC2ENwRb6E+c=
|
||||
github.com/dubbogo/gost v1.5.2/go.mod h1:pPTjVyoJan3aPxBPNUX0ADkXjPibLo+/Ib0/fADXSG8=
|
||||
github.com/dubbogo/gost v1.11.16 h1:fvOw8aKQ0BuUYuD+MaXAYFvT7tg2l7WAS5SL5gZJpFs=
|
||||
github.com/dubbogo/gost v1.11.16/go.mod h1:vIcP9rqz2KsXHPjsAwIUtfJIJjppQLQDcYaZTy/61jI=
|
||||
github.com/dubbogo/jsonparser v1.0.1/go.mod h1:tYAtpctvSP/tWw4MeelsowSPgXQRVHHWbqL6ynps8jU=
|
||||
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||
@ -612,6 +616,8 @@ github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0m
|
||||
github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
|
||||
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
|
||||
github.com/heetch/avro v0.3.1/go.mod h1:4xn38Oz/+hiEUTpbVfGVLfvOg0yKLlRP7Q9+gJJILgA=
|
||||
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
|
||||
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
|
||||
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
|
||||
github.com/hydrogen18/memlistener v0.0.0-20200120041712-dcc25e7acd91/go.mod h1:qEIFzExnS6016fRpRfxrExeVn2gbClQA99gQhnIcdhE=
|
||||
github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0/go.mod h1:N0Wam8K1arqPXNWjMo21EXnBPOPp36vB07FNRdD2geA=
|
||||
@ -659,7 +665,6 @@ github.com/juju/cmd v0.0.0-20171107070456-e74f39857ca0/go.mod h1:yWJQHl73rdSX4DH
|
||||
github.com/juju/collections v0.0.0-20200605021417-0d0ec82b7271/go.mod h1:5XgO71dV1JClcOJE+4dzdn4HrI5LiyKd7PlVG6eZYhY=
|
||||
github.com/juju/errors v0.0.0-20150916125642-1b5e39b83d18/go.mod h1:W54LbzXuIE0boCoNJfwqpmkKJ1O4TCTZMetAt6jGk7Q=
|
||||
github.com/juju/errors v0.0.0-20190930114154-d42613fe1ab9/go.mod h1:W54LbzXuIE0boCoNJfwqpmkKJ1O4TCTZMetAt6jGk7Q=
|
||||
github.com/juju/errors v0.0.0-20200330140219-3fe23663418f h1:MCOvExGLpaSIzLYB4iQXEHP4jYVU6vmzLNQPdMVrxnM=
|
||||
github.com/juju/errors v0.0.0-20200330140219-3fe23663418f/go.mod h1:W54LbzXuIE0boCoNJfwqpmkKJ1O4TCTZMetAt6jGk7Q=
|
||||
github.com/juju/gnuflag v0.0.0-20171113085948-2ce1bb71843d/go.mod h1:2PavIy+JPciBPrBUjwbNvtwB6RQlve+hkpll6QSNmOE=
|
||||
github.com/juju/httpprof v0.0.0-20141217160036-14bf14c30767/go.mod h1:+MaLYz4PumRkkyHYeXJ2G5g5cIW0sli2bOfpmbaMV/g=
|
||||
@ -684,9 +689,7 @@ github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7V
|
||||
github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
|
||||
github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes=
|
||||
github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes=
|
||||
github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88 h1:uC1QfSlInpQF+M0ao65imhwqKnz3Q2z/d8PWZRMQvDM=
|
||||
github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k=
|
||||
github.com/k0kubun/pp v3.0.1+incompatible h1:3tqvf7QgUnZ5tXO6pNAZlrvHgl6DvifjDrd9g2S9Z40=
|
||||
github.com/k0kubun/pp v3.0.1+incompatible/go.mod h1:GWse8YhT0p8pT4ir3ZgBbfZild3tgzSScAn6HmfYukg=
|
||||
github.com/kataras/golog v0.0.10/go.mod h1:yJ8YKCmyL+nWjERB90Qwn+bdyBZsaQwU3bTVFgkFIp8=
|
||||
github.com/kataras/iris/v12 v12.1.8/go.mod h1:LMYy4VlP67TQ3Zgriz8RE2h2kMZV2SgMYbq3UhfoFmE=
|
||||
@ -791,8 +794,6 @@ github.com/milvus-io/cgosymbolizer v0.0.0-20250318084424-114f4050c3a6 h1:YHMFI6L
|
||||
github.com/milvus-io/cgosymbolizer v0.0.0-20250318084424-114f4050c3a6/go.mod h1:DvXTE/K/RtHehxU8/GtDs4vFtfw64jJ3PaCnFri8CRg=
|
||||
github.com/milvus-io/gorocksdb v0.0.0-20220624081344-8c5f4212846b h1:TfeY0NxYxZzUfIfYe5qYDBzt4ZYRqzUjTR6CvUzjat8=
|
||||
github.com/milvus-io/gorocksdb v0.0.0-20220624081344-8c5f4212846b/go.mod h1:iwW+9cWfIzzDseEBCCeDSN5SD16Tidvy8cwQ7ZY8Qj4=
|
||||
github.com/milvus-io/milvus-proto/go-api/v2 v2.6.2-0.20250911093549-4cc2bace3f8c h1:B7zmZ30lWHE4wNjT/g2NPe3q0gcUtw7cA5shMtWAmDc=
|
||||
github.com/milvus-io/milvus-proto/go-api/v2 v2.6.2-0.20250911093549-4cc2bace3f8c/go.mod h1:/6UT4zZl6awVeXLeE7UGDWZvXj3IWkRsh3mqsn0DiAs=
|
||||
github.com/milvus-io/milvus-proto/go-api/v2 v2.6.3-0.20250918113553-d15826602cc9 h1:7ojrhnBHitGaqebExGP00x0wDTioMgPniEBmNdFPiDI=
|
||||
github.com/milvus-io/milvus-proto/go-api/v2 v2.6.3-0.20250918113553-d15826602cc9/go.mod h1:/6UT4zZl6awVeXLeE7UGDWZvXj3IWkRsh3mqsn0DiAs=
|
||||
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs=
|
||||
@ -1104,6 +1105,8 @@ github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
|
||||
github.com/twmb/murmur3 v1.1.6 h1:mqrRot1BRxm+Yct+vavLMou2/iJt0tNVTTC0QoIjaZg=
|
||||
github.com/twmb/murmur3 v1.1.6/go.mod h1:Qq/R7NUyOfr65zD+6Q5IHKsJLwP7exErjN6lyyq3OSQ=
|
||||
github.com/twpayne/go-geom v1.6.1 h1:iLE+Opv0Ihm/ABIcvQFGIiFBXd76oBIar9drAwHFhR4=
|
||||
github.com/twpayne/go-geom v1.6.1/go.mod h1:Kr+Nly6BswFsKM5sd31YaoWS5PeDDH2NftJTK7Gd028=
|
||||
github.com/uber/jaeger-client-go v2.30.0+incompatible h1:D6wyKGCecFaSRUpo8lCVbaOOb6ThwMmTEbhRwtKR97o=
|
||||
github.com/uber/jaeger-client-go v2.30.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk=
|
||||
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
|
||||
|
||||
@ -270,6 +270,8 @@ if ( BUILD_DISK_ANN STREQUAL "ON" )
|
||||
ADD_DEFINITIONS(-DBUILD_DISK_ANN=${BUILD_DISK_ANN})
|
||||
endif ()
|
||||
|
||||
ADD_DEFINITIONS(-DBOOST_GEOMETRY_INDEX_DETAIL_EXPERIMENTAL)
|
||||
|
||||
# Warning: add_subdirectory(src) must be after append_flags("-ftest-coverage"),
|
||||
# otherwise cpp code coverage tool will miss src folder
|
||||
add_subdirectory( thirdparty )
|
||||
|
||||
@ -6,10 +6,10 @@ class MilvusConan(ConanFile):
|
||||
settings = "os", "compiler", "build_type", "arch"
|
||||
requires = (
|
||||
"rocksdb/6.29.5@milvus/dev#b1842a53ddff60240c5282a3da498ba1",
|
||||
"boost/1.82.0#744a17160ebb5838e9115eab4d6d0c06",
|
||||
"boost/1.83.0@",
|
||||
"onetbb/2021.9.0#4a223ff1b4025d02f31b65aedf5e7f4a",
|
||||
"nlohmann_json/3.11.2#ffb9e9236619f1c883e36662f944345d",
|
||||
"zstd/1.5.4#308b8b048f9a3823ce248f9c150cc889",
|
||||
"nlohmann_json/3.11.3#ffb9e9236619f1c883e36662f944345d",
|
||||
"zstd/1.5.5#34e9debe03bf0964834a09dfbc31a5dd",
|
||||
"lz4/1.9.4#c5afb86edd69ac0df30e3a9e192e43db",
|
||||
"snappy/1.1.9#0519333fef284acd04806243de7d3070",
|
||||
"lzo/2.10#9517fc1bcc4d4cc229a79806003a1baa",
|
||||
@ -48,7 +48,8 @@ class MilvusConan(ConanFile):
|
||||
"simde/0.8.2#5e1edfd5cba92f25d79bf6ef4616b972",
|
||||
"xxhash/0.8.3#199e63ab9800302c232d030b27accec0",
|
||||
"unordered_dense/4.4.0#6a855c992618cc4c63019109a2e47298",
|
||||
"mongo-cxx-driver/3.11.0#ae206de0e90fb8cb2fb95465fb8b2f01"
|
||||
"mongo-cxx-driver/3.11.0#ae206de0e90fb8cb2fb95465fb8b2f01",
|
||||
"geos/3.12.0#0b177c90c25a8ca210578fb9e2899c37",
|
||||
)
|
||||
generators = ("cmake", "cmake_find_package")
|
||||
default_options = {
|
||||
@ -87,6 +88,8 @@ class MilvusConan(ConanFile):
|
||||
"fmt:header_only": True,
|
||||
"onetbb:tbbmalloc": False,
|
||||
"onetbb:tbbproxy": False,
|
||||
"gdal:shared": True,
|
||||
"gdal:fPIC": True,
|
||||
}
|
||||
|
||||
def configure(self):
|
||||
|
||||
@ -234,7 +234,9 @@ class Array {
|
||||
return true;
|
||||
}
|
||||
case DataType::STRING:
|
||||
case DataType::VARCHAR: {
|
||||
case DataType::VARCHAR:
|
||||
//treat Geometry as wkb string
|
||||
case DataType::GEOMETRY: {
|
||||
for (int i = 0; i < length_; ++i) {
|
||||
if (get_data<std::string_view>(i) !=
|
||||
arr.get_data<std::string_view>(i)) {
|
||||
@ -343,6 +345,13 @@ class Array {
|
||||
}
|
||||
break;
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
for (int j = 0; j < length_; ++j) {
|
||||
auto element = get_data<std::string>(j);
|
||||
data_array.mutable_geometry_data()->add_data(element);
|
||||
}
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
// empty array
|
||||
}
|
||||
@ -430,7 +439,8 @@ class Array {
|
||||
return true;
|
||||
}
|
||||
case DataType::VARCHAR:
|
||||
case DataType::STRING: {
|
||||
case DataType::STRING:
|
||||
case DataType::GEOMETRY: {
|
||||
for (int i = 0; i < length_; i++) {
|
||||
auto val = get_data<std::string>(i);
|
||||
if (val != arr2.array(i).string_val()) {
|
||||
@ -580,6 +590,13 @@ class ArrayView {
|
||||
}
|
||||
break;
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
for (int j = 0; j < length_; ++j) {
|
||||
auto element = get_data<std::string>(j);
|
||||
data_array.mutable_geometry_data()->add_data(element);
|
||||
}
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
// empty array
|
||||
}
|
||||
@ -664,7 +681,8 @@ class ArrayView {
|
||||
return true;
|
||||
}
|
||||
case DataType::VARCHAR:
|
||||
case DataType::STRING: {
|
||||
case DataType::STRING:
|
||||
case DataType::GEOMETRY: {
|
||||
for (int i = 0; i < length_; i++) {
|
||||
auto val = get_data<std::string>(i);
|
||||
if (val != arr2.array(i).string_val()) {
|
||||
|
||||
@ -36,6 +36,7 @@
|
||||
|
||||
namespace milvus {
|
||||
constexpr uint64_t MMAP_STRING_PADDING = 1;
|
||||
constexpr uint64_t MMAP_GEOMETRY_PADDING = 1;
|
||||
constexpr uint64_t MMAP_ARRAY_PADDING = 1;
|
||||
class Chunk {
|
||||
public:
|
||||
@ -279,6 +280,7 @@ class StringChunk : public Chunk {
|
||||
};
|
||||
|
||||
using JSONChunk = StringChunk;
|
||||
using GeometryChunk = StringChunk;
|
||||
|
||||
// An ArrayChunk is a class that represents a collection of arrays stored in a contiguous memory block.
|
||||
// It is initialized with the number of rows, a pointer to the data, the size of the data, the element type,
|
||||
|
||||
@ -27,6 +27,7 @@
|
||||
#include "common/FieldDataInterface.h"
|
||||
#include "common/FieldMeta.h"
|
||||
#include "common/File.h"
|
||||
#include "common/Geometry.h"
|
||||
#include "common/Types.h"
|
||||
#include "storage/Event.h"
|
||||
#include "storage/Util.h"
|
||||
@ -227,7 +228,8 @@ TEST(chunk, test_json_field) {
|
||||
auto ser_data = event_data.Serialize();
|
||||
|
||||
auto get_record_batch_reader =
|
||||
[&]() -> std::shared_ptr<::arrow::RecordBatchReader> {
|
||||
[&]() -> std::pair<std::shared_ptr<::arrow::RecordBatchReader>,
|
||||
std::unique_ptr<parquet::arrow::FileReader>> {
|
||||
auto buffer = std::make_shared<arrow::io::BufferReader>(
|
||||
ser_data.data() + 2 * sizeof(milvus::Timestamp),
|
||||
ser_data.size() - 2 * sizeof(milvus::Timestamp));
|
||||
@ -242,11 +244,11 @@ TEST(chunk, test_json_field) {
|
||||
std::shared_ptr<::arrow::RecordBatchReader> rb_reader;
|
||||
s = arrow_reader->GetRecordBatchReader(&rb_reader);
|
||||
EXPECT_TRUE(s.ok());
|
||||
return rb_reader;
|
||||
return {rb_reader, std::move(arrow_reader)};
|
||||
};
|
||||
|
||||
{
|
||||
auto rb_reader = get_record_batch_reader();
|
||||
auto [rb_reader, arrow_reader] = get_record_batch_reader();
|
||||
// nullable=false
|
||||
FieldMeta field_meta(FieldName("a"),
|
||||
milvus::FieldId(1),
|
||||
@ -276,7 +278,7 @@ TEST(chunk, test_json_field) {
|
||||
}
|
||||
}
|
||||
{
|
||||
auto rb_reader = get_record_batch_reader();
|
||||
auto [rb_reader, arrow_reader] = get_record_batch_reader();
|
||||
// nullable=true
|
||||
FieldMeta field_meta(FieldName("a"),
|
||||
milvus::FieldId(1),
|
||||
|
||||
@ -23,6 +23,7 @@
|
||||
#include "common/Chunk.h"
|
||||
#include "common/EasyAssert.h"
|
||||
#include "common/FieldDataInterface.h"
|
||||
#include "common/Geometry.h"
|
||||
#include "common/Types.h"
|
||||
#include "common/VectorTrait.h"
|
||||
#include "simdjson/common_defs.h"
|
||||
@ -162,6 +163,72 @@ JSONChunkWriter::finish() {
|
||||
row_nums_, data, size, nullable_, std::move(mmap_file_raii));
|
||||
}
|
||||
|
||||
void
|
||||
GeometryChunkWriter::write(const arrow::ArrayVector& array_vec) {
|
||||
auto size = 0;
|
||||
std::vector<std::string> wkb_strs;
|
||||
// tuple <data, size, offset>
|
||||
std::vector<std::tuple<const uint8_t*, int64_t, int64_t>> null_bitmaps;
|
||||
for (const auto& data : array_vec) {
|
||||
auto array = std::dynamic_pointer_cast<arrow::BinaryArray>(data);
|
||||
for (int i = 0; i < array->length(); i++) {
|
||||
auto str = array->GetView(i);
|
||||
wkb_strs.emplace_back(str);
|
||||
size += str.size();
|
||||
}
|
||||
if (nullable_) {
|
||||
auto null_bitmap_n = (data->length() + 7) / 8;
|
||||
null_bitmaps.emplace_back(
|
||||
data->null_bitmap_data(), data->length(), data->offset());
|
||||
size += null_bitmap_n;
|
||||
}
|
||||
row_nums_ += array->length();
|
||||
}
|
||||
// use 32-bit offsets to align with StringChunk layout
|
||||
size += sizeof(uint32_t) * (row_nums_ + 1) + MMAP_GEOMETRY_PADDING;
|
||||
if (!file_path_.empty()) {
|
||||
target_ = std::make_shared<MmapChunkTarget>(file_path_);
|
||||
} else {
|
||||
target_ = std::make_shared<MemChunkTarget>(size);
|
||||
}
|
||||
|
||||
// chunk layout: null bitmap, offset1, offset2, ..., offsetn, wkb1, wkb2, ..., wkbn, padding
|
||||
// write null bitmaps
|
||||
write_null_bit_maps(null_bitmaps);
|
||||
|
||||
int offset_num = row_nums_ + 1;
|
||||
uint32_t offset_start_pos =
|
||||
static_cast<uint32_t>(target_->tell() + sizeof(uint32_t) * offset_num);
|
||||
std::vector<uint32_t> offsets;
|
||||
offsets.reserve(offset_num);
|
||||
|
||||
for (auto str : wkb_strs) {
|
||||
offsets.push_back(offset_start_pos);
|
||||
offset_start_pos += str.size();
|
||||
}
|
||||
offsets.push_back(offset_start_pos);
|
||||
|
||||
target_->write(offsets.data(), offsets.size() * sizeof(uint32_t));
|
||||
|
||||
for (auto str : wkb_strs) {
|
||||
target_->write(str.data(), str.size());
|
||||
}
|
||||
}
|
||||
|
||||
std::unique_ptr<Chunk>
|
||||
GeometryChunkWriter::finish() {
|
||||
// write padding, maybe not needed anymore
|
||||
// FIXME
|
||||
char padding[MMAP_GEOMETRY_PADDING];
|
||||
target_->write(padding, MMAP_GEOMETRY_PADDING);
|
||||
auto [data, size] = target_->get();
|
||||
auto mmap_file_raii = file_path_.empty()
|
||||
? nullptr
|
||||
: std::make_unique<MmapFileRAII>(file_path_);
|
||||
return std::make_unique<GeometryChunk>(
|
||||
row_nums_, data, size, nullable_, std::move(mmap_file_raii));
|
||||
}
|
||||
|
||||
void
|
||||
ArrayChunkWriter::write(const arrow::ArrayVector& array_vec) {
|
||||
auto size = 0;
|
||||
@ -525,6 +592,10 @@ create_chunk_writer(const FieldMeta& field_meta, Args&&... args) {
|
||||
case milvus::DataType::JSON:
|
||||
return std::make_shared<JSONChunkWriter>(
|
||||
std::forward<Args>(args)..., nullable);
|
||||
case milvus::DataType::GEOMETRY: {
|
||||
return std::make_shared<GeometryChunkWriter>(
|
||||
std::forward<Args>(args)..., nullable);
|
||||
}
|
||||
case milvus::DataType::ARRAY:
|
||||
return std::make_shared<ArrayChunkWriter>(
|
||||
field_meta.get_element_type(),
|
||||
|
||||
@ -26,6 +26,7 @@
|
||||
|
||||
#include "storage/FileWriter.h"
|
||||
|
||||
#include "common/Geometry.h"
|
||||
namespace milvus {
|
||||
class ChunkWriterBase {
|
||||
public:
|
||||
@ -223,6 +224,16 @@ class JSONChunkWriter : public ChunkWriterBase {
|
||||
finish() override;
|
||||
};
|
||||
|
||||
// Chunk writer for GEOMETRY fields. Input arrives as WKB blobs inside
// arrow BinaryArrays; the on-disk layout matches StringChunk (null bitmap,
// 32-bit offset table, raw WKB payloads, padding).
class GeometryChunkWriter : public ChunkWriterBase {
 public:
    using ChunkWriterBase::ChunkWriterBase;

    // Accumulates the WKB payloads of all arrays into the chunk target.
    void
    write(const arrow::ArrayVector& array_vec) override;

    // Seals the chunk and returns the materialized GeometryChunk.
    std::unique_ptr<Chunk>
    finish() override;
};
|
||||
|
||||
class ArrayChunkWriter : public ChunkWriterBase {
|
||||
public:
|
||||
ArrayChunkWriter(const milvus::DataType element_type, bool nullable)
|
||||
|
||||
@ -260,6 +260,25 @@ FieldDataImpl<Type, is_type_entire_row>::FillFieldData(
|
||||
}
|
||||
return FillFieldData(values.data(), element_count);
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
AssertInfo(array->type()->id() == arrow::Type::type::BINARY,
|
||||
"inconsistent data type");
|
||||
auto geometry_array =
|
||||
std::dynamic_pointer_cast<arrow::BinaryArray>(array);
|
||||
AssertInfo(geometry_array != nullptr,
|
||||
"null geometry arrow binary array");
|
||||
std::vector<uint8_t> values(element_count);
|
||||
for (size_t index = 0; index < element_count; ++index) {
|
||||
values[index] = *geometry_array->GetValue(index, 0);
|
||||
}
|
||||
if (nullable_) {
|
||||
return FillFieldData(values.data(),
|
||||
array->null_bitmap_data(),
|
||||
element_count,
|
||||
array->offset());
|
||||
}
|
||||
return FillFieldData(values.data(), element_count);
|
||||
}
|
||||
case DataType::ARRAY: {
|
||||
auto array_array =
|
||||
std::dynamic_pointer_cast<arrow::BinaryArray>(array);
|
||||
@ -502,6 +521,17 @@ FieldDataImpl<Type, is_type_entire_row>::FillFieldData(
|
||||
return FillFieldData(
|
||||
values.data(), valid_data_ptr.get(), element_count, 0);
|
||||
}
|
||||
|
||||
case DataType::GEOMETRY: {
|
||||
FixedVector<std::string> values(element_count);
|
||||
if (default_value.has_value()) {
|
||||
std::fill(
|
||||
values.begin(), values.end(), default_value->string_data());
|
||||
return FillFieldData(values.data(), nullptr, element_count, 0);
|
||||
}
|
||||
return FillFieldData(
|
||||
values.data(), valid_data_ptr.get(), element_count, 0);
|
||||
}
|
||||
case DataType::ARRAY: {
|
||||
// todo: add array default_value
|
||||
FixedVector<Array> values(element_count);
|
||||
@ -528,6 +558,7 @@ template class FieldDataImpl<float, true>;
|
||||
template class FieldDataImpl<double, true>;
|
||||
template class FieldDataImpl<std::string, true>;
|
||||
template class FieldDataImpl<Json, true>;
|
||||
template class FieldDataImpl<Geometry, true>;
|
||||
template class FieldDataImpl<Array, true>;
|
||||
|
||||
// vector data
|
||||
@ -571,6 +602,9 @@ InitScalarFieldData(const DataType& type, bool nullable, int64_t cap_rows) {
|
||||
type, nullable, cap_rows);
|
||||
case DataType::JSON:
|
||||
return std::make_shared<FieldData<Json>>(type, nullable, cap_rows);
|
||||
case DataType::GEOMETRY:
|
||||
return std::make_shared<FieldData<std::string>>(
|
||||
type, nullable, cap_rows);
|
||||
default:
|
||||
ThrowInfo(DataTypeInvalid,
|
||||
"InitScalarFieldData not support data type " +
|
||||
|
||||
@ -69,6 +69,17 @@ class FieldData<Json> : public FieldDataJsonImpl {
|
||||
}
|
||||
};
|
||||
|
||||
// FieldData specialization for GEOMETRY columns. All storage behavior lives
// in FieldDataGeometryImpl; this type only forwards the constructor so that
// FieldData<Geometry> participates in the generic FieldData<T> machinery.
template <>
class FieldData<Geometry> : public FieldDataGeometryImpl {
 public:
    static_assert(IsScalar<Geometry>);
    explicit FieldData(DataType data_type,
                       bool nullable,
                       int64_t buffered_num_rows = 0)
        : FieldDataGeometryImpl(data_type, nullable, buffered_num_rows) {
    }
};
|
||||
|
||||
template <>
|
||||
class FieldData<Array> : public FieldDataArrayImpl {
|
||||
public:
|
||||
|
||||
@ -613,6 +613,79 @@ class FieldDataStringImpl : public FieldDataImpl<std::string, true> {
|
||||
}
|
||||
};
|
||||
|
||||
// FieldData implementation for GEOMETRY columns. Each row is a WKB-encoded
// payload held as std::string, so row sizes are data-dependent and the
// fixed-width accounting of the generic FieldDataImpl does not apply.
class FieldDataGeometryImpl : public FieldDataImpl<std::string, true> {
 public:
    explicit FieldDataGeometryImpl(DataType data_type,
                                   bool nullable,
                                   int64_t total_num_rows = 0)
        : FieldDataImpl<std::string, true>(
              1, data_type, nullable, total_num_rows) {
    }

    // Total payload size: sum of WKB byte lengths over all filled rows.
    int64_t
    DataSize() const override {
        int64_t total = 0;
        for (size_t row = 0; row < length(); ++row) {
            total += data_[row].size();
        }
        return total;
    }

    // Payload size of one row; `offset` must address a filled slot.
    int64_t
    DataSize(ssize_t offset) const override {
        AssertInfo(offset < get_num_rows(),
                   "field data subscript out of range");
        AssertInfo(offset < length(),
                   "subscript position don't has valid value");
        return data_[offset].size();
    }

    // Generic-array entry point: geometry data must arrive as an arrow
    // BINARY array carrying raw WKB blobs.
    void
    FillFieldData(const std::shared_ptr<arrow::Array> array) override {
        AssertInfo(array->type()->id() == arrow::Type::type::BINARY,
                   "inconsistent data type, expected: {}, got: {}",
                   "BINARY",
                   array->type()->ToString());
        auto binary_array =
            std::dynamic_pointer_cast<arrow::BinaryArray>(array);
        FillFieldData(binary_array);
    }

    // Appends all rows of `array` after the currently filled region,
    // growing storage and copying the validity bitmap when nullable.
    void
    FillFieldData(const std::shared_ptr<arrow::BinaryArray>& array) override {
        const auto n = array->length();
        if (n == 0) {
            return;
        }
        null_count_ = array->null_count();

        std::lock_guard lck(tell_mutex_);
        if (length_ + n > get_num_rows()) {
            resize_field_data(length_ + n);
        }
        // Null rows keep their default-constructed (empty) payload; the
        // validity bitmap below marks them as invalid.
        auto row = 0;
        for (const auto& wkb : *array) {
            if (wkb.has_value()) {
                data_[length_ + row] = wkb.value();
            }
            row++;
        }
        if (IsNullable()) {
            auto valid_data = array->null_bitmap_data();
            if (valid_data != nullptr) {
                bitset::detail::ElementWiseBitsetPolicy<uint8_t>::op_copy(
                    valid_data,
                    array->offset(),
                    valid_data_.data(),
                    length_,
                    n);
            }
        }
        length_ += n;
    }
};
|
||||
|
||||
class FieldDataJsonImpl : public FieldDataImpl<Json, true> {
|
||||
public:
|
||||
explicit FieldDataJsonImpl(DataType data_type,
|
||||
|
||||
301
internal/core/src/common/Geometry.h
Normal file
301
internal/core/src/common/Geometry.h
Normal file
@ -0,0 +1,301 @@
|
||||
// Copyright (C) 2019-2020 Zilliz. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software distributed under the License
|
||||
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
|
||||
// or implied. See the License for the specific language governing permissions and limitations under the License
|
||||
#pragma once
|
||||
|
||||
#include <geos_c.h>
|
||||
#include <memory>
|
||||
#include <cmath>
|
||||
#include <string>
|
||||
#include "common/EasyAssert.h"
|
||||
|
||||
namespace milvus {
|
||||
|
||||
class Geometry {
|
||||
public:
|
||||
// Default constructor creates invalid geometry
|
||||
Geometry() : geometry_(nullptr), ctx_(nullptr) {
|
||||
}
|
||||
|
||||
~Geometry() {
|
||||
if (geometry_ != nullptr) {
|
||||
GEOSGeom_destroy_r(ctx_, geometry_);
|
||||
}
|
||||
}
|
||||
|
||||
// Constructor from WKB data
|
||||
explicit Geometry(GEOSContextHandle_t ctx, const void* wkb, size_t size)
|
||||
: ctx_(ctx) {
|
||||
GEOSWKBReader* reader = GEOSWKBReader_create_r(ctx);
|
||||
AssertInfo(reader != nullptr, "Failed to create GEOS WKB reader");
|
||||
|
||||
GEOSGeometry* geom = GEOSWKBReader_read_r(
|
||||
ctx, reader, static_cast<const unsigned char*>(wkb), size);
|
||||
GEOSWKBReader_destroy_r(ctx, reader);
|
||||
|
||||
AssertInfo(geom != nullptr,
|
||||
"Failed to construct geometry from WKB data");
|
||||
geometry_ = geom;
|
||||
}
|
||||
|
||||
// Constructor from WKT string
|
||||
explicit Geometry(GEOSContextHandle_t ctx, const char* wkt) : ctx_(ctx) {
|
||||
GEOSWKTReader* reader = GEOSWKTReader_create_r(ctx);
|
||||
AssertInfo(reader != nullptr, "Failed to create GEOS WKT reader");
|
||||
|
||||
GEOSGeometry* geom = GEOSWKTReader_read_r(ctx, reader, wkt);
|
||||
GEOSWKTReader_destroy_r(ctx, reader);
|
||||
|
||||
AssertInfo(geom != nullptr,
|
||||
"Failed to construct geometry from WKT data");
|
||||
geometry_ = geom;
|
||||
}
|
||||
|
||||
// Copy assignment
|
||||
Geometry&
|
||||
operator=(const Geometry& other) {
|
||||
if (this != &other) {
|
||||
geometry_ = other.geometry_;
|
||||
ctx_ = other.ctx_;
|
||||
}
|
||||
return *this;
|
||||
}
|
||||
|
||||
// Copy constructor with context (for cloning)
|
||||
Geometry(const Geometry& other) : ctx_(other.ctx_) {
|
||||
if (other.IsValid()) {
|
||||
GEOSGeometry* cloned =
|
||||
GEOSGeom_clone_r(other.ctx_, other.geometry_);
|
||||
AssertInfo(cloned != nullptr, "Failed to clone geometry");
|
||||
geometry_ = cloned;
|
||||
} else {
|
||||
geometry_ = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
bool
|
||||
IsValid() const {
|
||||
return geometry_ != nullptr;
|
||||
}
|
||||
|
||||
// Get raw GEOS geometry pointer (for cache management)
|
||||
GEOSGeometry*
|
||||
GetRawGeometry() const {
|
||||
return geometry_;
|
||||
}
|
||||
|
||||
GEOSGeometry*
|
||||
GetGeometry() const {
|
||||
return geometry_;
|
||||
}
|
||||
|
||||
// Spatial relation operations using GEOS API
|
||||
bool
|
||||
equals(const Geometry& other) const {
|
||||
if (!IsValid() || !other.IsValid()) {
|
||||
return false;
|
||||
}
|
||||
char result = GEOSEquals_r(ctx_, geometry_, other.geometry_);
|
||||
return result == 1;
|
||||
}
|
||||
|
||||
bool
|
||||
touches(const Geometry& other) const {
|
||||
if (!IsValid() || !other.IsValid()) {
|
||||
return false;
|
||||
}
|
||||
char result = GEOSTouches_r(ctx_, geometry_, other.geometry_);
|
||||
return result == 1;
|
||||
}
|
||||
|
||||
bool
|
||||
overlaps(const Geometry& other) const {
|
||||
if (!IsValid() || !other.IsValid()) {
|
||||
return false;
|
||||
}
|
||||
char result = GEOSOverlaps_r(ctx_, geometry_, other.geometry_);
|
||||
return result == 1;
|
||||
}
|
||||
|
||||
bool
|
||||
crosses(const Geometry& other) const {
|
||||
if (!IsValid() || !other.IsValid()) {
|
||||
return false;
|
||||
}
|
||||
char result = GEOSCrosses_r(ctx_, geometry_, other.geometry_);
|
||||
return result == 1;
|
||||
}
|
||||
|
||||
bool
|
||||
contains(const Geometry& other) const {
|
||||
if (!IsValid() || !other.IsValid()) {
|
||||
return false;
|
||||
}
|
||||
char result = GEOSContains_r(ctx_, geometry_, other.geometry_);
|
||||
return result == 1;
|
||||
}
|
||||
|
||||
bool
|
||||
intersects(const Geometry& other) const {
|
||||
if (!IsValid() || !other.IsValid()) {
|
||||
return false;
|
||||
}
|
||||
char result = GEOSIntersects_r(ctx_, geometry_, other.geometry_);
|
||||
return result == 1;
|
||||
}
|
||||
|
||||
bool
|
||||
within(const Geometry& other) const {
|
||||
if (!IsValid() || !other.IsValid()) {
|
||||
return false;
|
||||
}
|
||||
char result = GEOSWithin_r(ctx_, geometry_, other.geometry_);
|
||||
return result == 1;
|
||||
}
|
||||
|
||||
// Distance within check using GEOS distance calculation
|
||||
bool
|
||||
dwithin(const Geometry& other, double distance) const {
|
||||
if (!IsValid() || !other.IsValid()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Get geometry types
|
||||
int thisType = GEOSGeomTypeId_r(ctx_, geometry_);
|
||||
int otherType = GEOSGeomTypeId_r(ctx_, other.geometry_);
|
||||
|
||||
// Ensure other geometry is a point
|
||||
AssertInfo(otherType == GEOS_POINT, "other geometry is not a point");
|
||||
|
||||
// For point-to-point, use Haversine formula for accuracy
|
||||
if (thisType == GEOS_POINT) {
|
||||
double thisX, thisY, otherX, otherY;
|
||||
if (GEOSGeomGetX_r(ctx_, geometry_, &thisX) == 1 &&
|
||||
GEOSGeomGetY_r(ctx_, geometry_, &thisY) == 1 &&
|
||||
GEOSGeomGetX_r(ctx_, other.geometry_, &otherX) == 1 &&
|
||||
GEOSGeomGetY_r(ctx_, other.geometry_, &otherY) == 1) {
|
||||
double actual_distance =
|
||||
haversine_distance_meters(thisY, thisX, otherY, otherX);
|
||||
return actual_distance <= distance;
|
||||
}
|
||||
}
|
||||
|
||||
// For other geometry types, use GEOS distance (in degrees)
|
||||
double geos_distance;
|
||||
if (GEOSDistance_r(ctx_, geometry_, other.geometry_, &geos_distance) ==
|
||||
1) {
|
||||
// Get query point coordinates for conversion reference
|
||||
double query_lat, query_lon;
|
||||
if (GEOSGeomGetX_r(ctx_, other.geometry_, &query_lon) == 1 &&
|
||||
GEOSGeomGetY_r(ctx_, other.geometry_, &query_lat) == 1) {
|
||||
double distance_in_meters =
|
||||
degrees_to_meters_at_location(geos_distance, query_lat);
|
||||
return distance_in_meters <= distance;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private:
|
||||
// Convert degrees distance to meters using approximate location
|
||||
static double
|
||||
degrees_to_meters_at_location(double degrees_distance, double center_lat) {
|
||||
const double metersPerDegreeLat = 111320.0;
|
||||
|
||||
// For small distances, approximate using latitude-adjusted conversion
|
||||
double latRad = center_lat * 3.14159265358979323846 / 180.0;
|
||||
double avgMetersPerDegree =
|
||||
metersPerDegreeLat *
|
||||
std::sqrt((1.0 + std::cos(latRad) * std::cos(latRad)) / 2.0);
|
||||
|
||||
return degrees_distance * avgMetersPerDegree;
|
||||
}
|
||||
|
||||
// Haversine formula to calculate great-circle distance between two points on Earth
|
||||
static double
|
||||
haversine_distance_meters(double lat1,
|
||||
double lon1,
|
||||
double lat2,
|
||||
double lon2) {
|
||||
const double R = 6371000.0; // Earth's radius in meters
|
||||
const double PI = 3.14159265358979323846;
|
||||
|
||||
// Convert degrees to radians
|
||||
double lat1_rad = lat1 * PI / 180.0;
|
||||
double lon1_rad = lon1 * PI / 180.0;
|
||||
double lat2_rad = lat2 * PI / 180.0;
|
||||
double lon2_rad = lon2 * PI / 180.0;
|
||||
|
||||
// Haversine formula
|
||||
double dlat = lat2_rad - lat1_rad;
|
||||
double dlon = lon2_rad - lon1_rad;
|
||||
|
||||
double a = std::sin(dlat / 2.0) * std::sin(dlat / 2.0) +
|
||||
std::cos(lat1_rad) * std::cos(lat2_rad) *
|
||||
std::sin(dlon / 2.0) * std::sin(dlon / 2.0);
|
||||
double c = 2.0 * std::atan2(std::sqrt(a), std::sqrt(1.0 - a));
|
||||
|
||||
return R * c; // Distance in meters
|
||||
}
|
||||
|
||||
public:
|
||||
// Export to WKT string
|
||||
std::string
|
||||
to_wkt_string() const {
|
||||
if (!IsValid()) {
|
||||
return "";
|
||||
}
|
||||
|
||||
GEOSWKTWriter* writer = GEOSWKTWriter_create_r(ctx_);
|
||||
AssertInfo(writer != nullptr, "Failed to create GEOS WKT writer");
|
||||
|
||||
char* wkt = GEOSWKTWriter_write_r(ctx_, writer, geometry_);
|
||||
GEOSWKTWriter_destroy_r(ctx_, writer);
|
||||
|
||||
if (!wkt) {
|
||||
return "";
|
||||
}
|
||||
|
||||
std::string result(wkt);
|
||||
GEOSFree_r(ctx_, wkt);
|
||||
return result;
|
||||
}
|
||||
|
||||
// Export to WKB string (for test)
|
||||
std::string
|
||||
to_wkb_string() const {
|
||||
if (!IsValid()) {
|
||||
return "";
|
||||
}
|
||||
|
||||
GEOSWKBWriter* writer = GEOSWKBWriter_create_r(ctx_);
|
||||
AssertInfo(writer != nullptr, "Failed to create GEOS WKB writer");
|
||||
|
||||
size_t size;
|
||||
unsigned char* wkb =
|
||||
GEOSWKBWriter_write_r(ctx_, writer, geometry_, &size);
|
||||
GEOSWKBWriter_destroy_r(ctx_, writer);
|
||||
|
||||
if (!wkb) {
|
||||
ThrowInfo(UnexpectedError, "Failed to create GEOS WKB writer");
|
||||
}
|
||||
|
||||
std::string result(reinterpret_cast<const char*>(wkb), size);
|
||||
GEOSFree_r(ctx_, wkb);
|
||||
return result;
|
||||
}
|
||||
|
||||
private:
|
||||
GEOSGeometry* geometry_; // Raw pointer, managed by cache
|
||||
GEOSContextHandle_t ctx_;
|
||||
};
|
||||
|
||||
} // namespace milvus
|
||||
209
internal/core/src/common/GeometryCache.h
Normal file
209
internal/core/src/common/GeometryCache.h
Normal file
@ -0,0 +1,209 @@
|
||||
// Copyright (C) 2019-2020 Zilliz. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software distributed under the License
|
||||
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
|
||||
// or implied. See the License for the specific language governing permissions and limitations under the License
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <memory>
|
||||
#include <mutex>
|
||||
#include <shared_mutex>
|
||||
#include <string>
|
||||
#include <unordered_map>
|
||||
#include <vector>
|
||||
|
||||
#include "common/EasyAssert.h"
|
||||
#include "common/Geometry.h"
|
||||
#include "common/Types.h"
|
||||
#include "geos_c.h"
|
||||
#include "log/Log.h"
|
||||
|
||||
namespace milvus {
|
||||
namespace exec {
|
||||
|
||||
// Helper function to create cache key from segment_id and field_id
|
||||
inline std::string
|
||||
MakeCacheKey(int64_t segment_id, FieldId field_id) {
|
||||
return std::to_string(segment_id) + "_" + std::to_string(field_id.get());
|
||||
}
|
||||
|
||||
// Vector-based Geometry cache that maintains original field data order
|
||||
class SimpleGeometryCache {
|
||||
public:
|
||||
// Append WKB data during field loading
|
||||
void
|
||||
AppendData(GEOSContextHandle_t ctx, const char* wkb_data, size_t size) {
|
||||
std::lock_guard<std::shared_mutex> lock(mutex_);
|
||||
|
||||
if (size == 0 || wkb_data == nullptr) {
|
||||
// Handle null/empty geometry - add invalid geometry
|
||||
geometries_.emplace_back();
|
||||
} else {
|
||||
try {
|
||||
// Create geometry with cache's context
|
||||
geometries_.emplace_back(ctx, wkb_data, size);
|
||||
} catch (const std::exception& e) {
|
||||
ThrowInfo(UnexpectedError,
|
||||
"Failed to construct geometry from WKB data: {}",
|
||||
e.what());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get shared lock for batch operations (RAII)
|
||||
std::shared_lock<std::shared_mutex>
|
||||
AcquireReadLock() const {
|
||||
return std::shared_lock<std::shared_mutex>(mutex_);
|
||||
}
|
||||
|
||||
// Get Geometry by offset without locking (use with AcquireReadLock)
|
||||
const Geometry*
|
||||
GetByOffsetUnsafe(size_t offset) const {
|
||||
if (offset >= geometries_.size()) {
|
||||
ThrowInfo(UnexpectedError,
|
||||
"offset {} is out of range: {}",
|
||||
offset,
|
||||
geometries_.size());
|
||||
}
|
||||
|
||||
const auto& geometry = geometries_[offset];
|
||||
return geometry.IsValid() ? &geometry : nullptr;
|
||||
}
|
||||
|
||||
// Get Geometry by offset (thread-safe read for filtering)
|
||||
const Geometry*
|
||||
GetByOffset(size_t offset) const {
|
||||
std::shared_lock<std::shared_mutex> lock(mutex_);
|
||||
return GetByOffsetUnsafe(offset);
|
||||
}
|
||||
|
||||
// Get total number of loaded geometries
|
||||
size_t
|
||||
Size() const {
|
||||
std::shared_lock<std::shared_mutex> lock(mutex_);
|
||||
return geometries_.size();
|
||||
}
|
||||
|
||||
// Check if cache is loaded
|
||||
bool
|
||||
IsLoaded() const {
|
||||
std::shared_lock<std::shared_mutex> lock(mutex_);
|
||||
return !geometries_.empty();
|
||||
}
|
||||
|
||||
private:
|
||||
mutable std::shared_mutex mutex_; // For read/write operations
|
||||
std::vector<Geometry> geometries_; // Direct storage of Geometry objects
|
||||
};
|
||||
|
||||
// Global cache instance per segment+field
|
||||
class SimpleGeometryCacheManager {
|
||||
public:
|
||||
static SimpleGeometryCacheManager&
|
||||
Instance() {
|
||||
static SimpleGeometryCacheManager instance;
|
||||
return instance;
|
||||
}
|
||||
|
||||
SimpleGeometryCacheManager() = default;
|
||||
|
||||
SimpleGeometryCache&
|
||||
GetCache(int64_t segment_id, FieldId field_id) {
|
||||
std::lock_guard<std::mutex> lock(mutex_);
|
||||
auto key = MakeCacheKey(segment_id, field_id);
|
||||
auto it = caches_.find(key);
|
||||
if (it != caches_.end()) {
|
||||
return *(it->second);
|
||||
}
|
||||
|
||||
auto cache = std::make_unique<SimpleGeometryCache>();
|
||||
auto* cache_ptr = cache.get();
|
||||
caches_.emplace(key, std::move(cache));
|
||||
return *cache_ptr;
|
||||
}
|
||||
|
||||
void
|
||||
RemoveCache(GEOSContextHandle_t ctx, int64_t segment_id, FieldId field_id) {
|
||||
std::lock_guard<std::mutex> lock(mutex_);
|
||||
auto key = MakeCacheKey(segment_id, field_id);
|
||||
caches_.erase(key);
|
||||
}
|
||||
|
||||
// Remove all caches for a segment (useful when segment is destroyed)
|
||||
void
|
||||
RemoveSegmentCaches(GEOSContextHandle_t ctx, int64_t segment_id) {
|
||||
std::lock_guard<std::mutex> lock(mutex_);
|
||||
auto segment_prefix = std::to_string(segment_id) + "_";
|
||||
auto it = caches_.begin();
|
||||
while (it != caches_.end()) {
|
||||
if (it->first.substr(0, segment_prefix.length()) ==
|
||||
segment_prefix) {
|
||||
it = caches_.erase(it);
|
||||
} else {
|
||||
++it;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get cache statistics for monitoring
|
||||
struct CacheStats {
|
||||
size_t total_caches = 0;
|
||||
size_t loaded_caches = 0;
|
||||
size_t total_geometries = 0;
|
||||
};
|
||||
|
||||
CacheStats
|
||||
GetStats() const {
|
||||
std::lock_guard<std::mutex> lock(mutex_);
|
||||
CacheStats stats;
|
||||
stats.total_caches = caches_.size();
|
||||
for (const auto& [key, cache] : caches_) {
|
||||
if (cache->IsLoaded()) {
|
||||
stats.loaded_caches++;
|
||||
stats.total_geometries += cache->Size();
|
||||
}
|
||||
}
|
||||
return stats;
|
||||
}
|
||||
|
||||
private:
|
||||
SimpleGeometryCacheManager(const SimpleGeometryCacheManager&) = delete;
|
||||
SimpleGeometryCacheManager&
|
||||
operator=(const SimpleGeometryCacheManager&) = delete;
|
||||
|
||||
mutable std::mutex mutex_;
|
||||
std::unordered_map<std::string, std::unique_ptr<SimpleGeometryCache>>
|
||||
caches_;
|
||||
};
|
||||
|
||||
} // namespace exec
|
||||
|
||||
// Convenient global functions for direct access to geometry cache
|
||||
inline const Geometry*
|
||||
GetGeometryByOffset(int64_t segment_id, FieldId field_id, size_t offset) {
|
||||
auto& cache = exec::SimpleGeometryCacheManager::Instance().GetCache(
|
||||
segment_id, field_id);
|
||||
return cache.GetByOffset(offset);
|
||||
}
|
||||
|
||||
inline void
|
||||
RemoveGeometryCache(GEOSContextHandle_t ctx,
|
||||
int64_t segment_id,
|
||||
FieldId field_id) {
|
||||
exec::SimpleGeometryCacheManager::Instance().RemoveCache(
|
||||
ctx, segment_id, field_id);
|
||||
}
|
||||
|
||||
inline void
|
||||
RemoveSegmentGeometryCaches(GEOSContextHandle_t ctx, int64_t segment_id) {
|
||||
exec::SimpleGeometryCacheManager::Instance().RemoveSegmentCaches(
|
||||
ctx, segment_id);
|
||||
}
|
||||
|
||||
} // namespace milvus
|
||||
@ -31,9 +31,9 @@ constexpr bool IsVector = std::is_base_of_v<VectorTrait, T>;
|
||||
template <typename T>
|
||||
constexpr bool IsScalar =
|
||||
std::is_fundamental_v<T> || std::is_same_v<T, std::string> ||
|
||||
std::is_same_v<T, Json> || std::is_same_v<T, std::string_view> ||
|
||||
std::is_same_v<T, Array> || std::is_same_v<T, ArrayView> ||
|
||||
std::is_same_v<T, proto::plan::Array>;
|
||||
std::is_same_v<T, Json> || std::is_same_v<T, Geometry> ||
|
||||
std::is_same_v<T, std::string_view> || std::is_same_v<T, Array> ||
|
||||
std::is_same_v<T, ArrayView> || std::is_same_v<T, proto::plan::Array>;
|
||||
|
||||
template <typename T>
|
||||
constexpr bool IsSparse =
|
||||
|
||||
@ -48,6 +48,7 @@
|
||||
#include "pb/segcore.pb.h"
|
||||
#include "Json.h"
|
||||
#include "type_c.h"
|
||||
#include "Geometry.h"
|
||||
|
||||
#include "CustomBitset.h"
|
||||
|
||||
@ -80,7 +81,7 @@ enum class DataType {
|
||||
VARCHAR = 21,
|
||||
ARRAY = 22,
|
||||
JSON = 23,
|
||||
// GEOMETRY = 24 // reserved in proto
|
||||
GEOMETRY = 24,
|
||||
TEXT = 25,
|
||||
TIMESTAMPTZ = 26, // Timestamp with timezone, stored as int64
|
||||
|
||||
@ -181,6 +182,8 @@ GetArrowDataType(DataType data_type, int dim = 1) {
|
||||
case DataType::ARRAY:
|
||||
case DataType::JSON:
|
||||
return arrow::binary();
|
||||
case DataType::GEOMETRY:
|
||||
return arrow::binary();
|
||||
case DataType::VECTOR_FLOAT:
|
||||
return arrow::fixed_size_binary(dim * 4);
|
||||
case DataType::VECTOR_BINARY: {
|
||||
@ -257,6 +260,8 @@ GetDataTypeName(DataType data_type) {
|
||||
return "json";
|
||||
case DataType::TEXT:
|
||||
return "text";
|
||||
case DataType::GEOMETRY:
|
||||
return "geometry";
|
||||
case DataType::VECTOR_FLOAT:
|
||||
return "vector_float";
|
||||
case DataType::VECTOR_BINARY:
|
||||
@ -297,6 +302,7 @@ using GroupByValueType = std::optional<std::variant<std::monostate,
|
||||
std::string>>;
|
||||
using ContainsType = proto::plan::JSONContainsExpr_JSONOp;
|
||||
using NullExprType = proto::plan::NullExpr_NullOp;
|
||||
using GISFunctionType = proto::plan::GISFunctionFilterExpr_GISOp;
|
||||
|
||||
inline bool
|
||||
IsPrimaryKeyDataType(DataType data_type) {
|
||||
@ -350,6 +356,11 @@ IsJsonDataType(DataType data_type) {
|
||||
return data_type == DataType::JSON;
|
||||
}
|
||||
|
||||
// Returns true iff `data_type` is the scalar GEOMETRY type.
inline bool
IsGeometryDataType(DataType data_type) {
    return data_type == DataType::GEOMETRY;
}
|
||||
|
||||
inline bool
|
||||
IsArrayDataType(DataType data_type) {
|
||||
return data_type == DataType::ARRAY || data_type == DataType::VECTOR_ARRAY;
|
||||
@ -357,7 +368,8 @@ IsArrayDataType(DataType data_type) {
|
||||
|
||||
inline bool
|
||||
IsBinaryDataType(DataType data_type) {
|
||||
return IsJsonDataType(data_type) || IsArrayDataType(data_type);
|
||||
return IsJsonDataType(data_type) || IsArrayDataType(data_type) ||
|
||||
IsGeometryDataType(data_type);
|
||||
}
|
||||
|
||||
inline bool
|
||||
@ -383,6 +395,11 @@ IsJsonType(proto::schema::DataType type) {
|
||||
return type == proto::schema::DataType::JSON;
|
||||
}
|
||||
|
||||
// Returns true iff `data_type` is GEOMETRY.
// NOTE(review): unlike the neighbouring Is*Type helpers, this one takes
// milvus::DataType instead of proto::schema::DataType — confirm the
// asymmetry is intentional (it duplicates IsGeometryDataType).
inline bool
IsGeometryType(DataType data_type) {
    return data_type == DataType::GEOMETRY;
}
|
||||
|
||||
inline bool
|
||||
IsArrayType(proto::schema::DataType type) {
|
||||
return type == proto::schema::DataType::Array;
|
||||
@ -667,6 +684,15 @@ struct TypeTraits<DataType::JSON> {
|
||||
static constexpr const char* Name = "JSON";
|
||||
};
|
||||
|
||||
// Type traits for GEOMETRY: a variable-width scalar with no native C++
// representation (payloads are WKB-encoded byte strings).
template <>
struct TypeTraits<DataType::GEOMETRY> {
    using NativeType = void;
    static constexpr DataType TypeKind = DataType::GEOMETRY;
    static constexpr bool IsPrimitiveType = false;
    static constexpr bool IsFixedWidth = false;
    static constexpr const char* Name = "GEOMETRY";
};
|
||||
|
||||
template <>
|
||||
struct TypeTraits<DataType::ROW> {
|
||||
using NativeType = void;
|
||||
@ -769,6 +795,9 @@ struct fmt::formatter<milvus::DataType> : formatter<string_view> {
|
||||
case milvus::DataType::JSON:
|
||||
name = "JSON";
|
||||
break;
|
||||
case milvus::DataType::GEOMETRY:
|
||||
name = "GEOMETRY";
|
||||
break;
|
||||
case milvus::DataType::ROW:
|
||||
name = "ROW";
|
||||
break;
|
||||
|
||||
@ -25,12 +25,14 @@
|
||||
#include "exec/expression/CompareExpr.h"
|
||||
#include "exec/expression/ConjunctExpr.h"
|
||||
#include "exec/expression/ExistsExpr.h"
|
||||
#include "exec/expression/GISFunctionFilterExpr.h"
|
||||
#include "exec/expression/JsonContainsExpr.h"
|
||||
#include "exec/expression/LogicalBinaryExpr.h"
|
||||
#include "exec/expression/LogicalUnaryExpr.h"
|
||||
#include "exec/expression/NullExpr.h"
|
||||
#include "exec/expression/TermExpr.h"
|
||||
#include "exec/expression/UnaryExpr.h"
|
||||
#include "expr/ITypeExpr.h"
|
||||
#include "exec/expression/ValueExpr.h"
|
||||
#include "exec/expression/TimestamptzArithCompareExpr.h"
|
||||
#include "expr/ITypeExpr.h"
|
||||
@ -48,7 +50,6 @@ ExprSet::Eval(int32_t begin,
|
||||
EvalCtx& context,
|
||||
std::vector<VectorPtr>& results) {
|
||||
results.resize(exprs_.size());
|
||||
|
||||
for (size_t i = begin; i < end; ++i) {
|
||||
exprs_[i]->Eval(context, results[i]);
|
||||
}
|
||||
@ -332,6 +333,17 @@ CompileExpression(const expr::TypedExprPtr& expr,
|
||||
context->get_active_count(),
|
||||
context->query_config()->get_expr_batch_size(),
|
||||
context->get_consistency_level());
|
||||
} else if (auto casted_expr = std::dynamic_pointer_cast<
|
||||
const milvus::expr::GISFunctionFilterExpr>(expr)) {
|
||||
result = std::make_shared<PhyGISFunctionFilterExpr>(
|
||||
compiled_inputs,
|
||||
casted_expr,
|
||||
"PhyGISFunctionFilterExpr",
|
||||
op_ctx,
|
||||
context->get_segment(),
|
||||
context->get_active_count(),
|
||||
context->query_config()->get_expr_batch_size(),
|
||||
context->get_consistency_level());
|
||||
} else {
|
||||
ThrowInfo(ExprInvalid, "unsupport expr: ", expr->ToString());
|
||||
}
|
||||
|
||||
@ -342,7 +342,10 @@ class SegmentExpr : public Expr {
|
||||
// used for processing raw data expr for sealed segments.
|
||||
// now only used for std::string_view && json
|
||||
// TODO: support more types
|
||||
template <typename T, typename FUNC, typename... ValTypes>
|
||||
template <typename T,
|
||||
bool NeedSegmentOffsets = false,
|
||||
typename FUNC,
|
||||
typename... ValTypes>
|
||||
int64_t
|
||||
ProcessChunkForSealedSeg(
|
||||
FUNC func,
|
||||
@ -362,6 +365,22 @@ class SegmentExpr : public Expr {
|
||||
if (!skip_func || !skip_func(skip_index, field_id_, 0)) {
|
||||
// first is the raw data, second is valid_data
|
||||
// use valid_data to see if raw data is null
|
||||
if constexpr (NeedSegmentOffsets) {
|
||||
// For GIS functions: construct segment offsets array
|
||||
std::vector<int32_t> segment_offsets_array(need_size);
|
||||
for (int64_t j = 0; j < need_size; ++j) {
|
||||
segment_offsets_array[j] =
|
||||
static_cast<int32_t>(current_data_chunk_pos_ + j);
|
||||
}
|
||||
func(views_info.first.data(),
|
||||
views_info.second.data(),
|
||||
nullptr,
|
||||
segment_offsets_array.data(),
|
||||
need_size,
|
||||
res,
|
||||
valid_res,
|
||||
values...);
|
||||
} else {
|
||||
func(views_info.first.data(),
|
||||
views_info.second.data(),
|
||||
nullptr,
|
||||
@ -369,6 +388,7 @@ class SegmentExpr : public Expr {
|
||||
res,
|
||||
valid_res,
|
||||
values...);
|
||||
}
|
||||
} else {
|
||||
ApplyValidData(views_info.second.data(), res, valid_res, need_size);
|
||||
}
|
||||
@ -629,7 +649,11 @@ class SegmentExpr : public Expr {
|
||||
return input->size();
|
||||
}
|
||||
|
||||
template <typename T, typename FUNC, typename... ValTypes>
|
||||
// Template parameter to control whether segment offsets are needed (for GIS functions)
|
||||
template <typename T,
|
||||
bool NeedSegmentOffsets = false,
|
||||
typename FUNC,
|
||||
typename... ValTypes>
|
||||
int64_t
|
||||
ProcessDataChunksForSingleChunk(
|
||||
FUNC func,
|
||||
@ -641,7 +665,7 @@ class SegmentExpr : public Expr {
|
||||
if constexpr (std::is_same_v<T, std::string_view> ||
|
||||
std::is_same_v<T, Json>) {
|
||||
if (segment_->type() == SegmentType::Sealed) {
|
||||
return ProcessChunkForSealedSeg<T>(
|
||||
return ProcessChunkForSealedSeg<T, NeedSegmentOffsets>(
|
||||
func, skip_func, res, valid_res, values...);
|
||||
}
|
||||
}
|
||||
@ -667,8 +691,26 @@ class SegmentExpr : public Expr {
|
||||
if (valid_data != nullptr) {
|
||||
valid_data += data_pos;
|
||||
}
|
||||
|
||||
if (!skip_func || !skip_func(skip_index, field_id_, i)) {
|
||||
const T* data = chunk.data() + data_pos;
|
||||
|
||||
if constexpr (NeedSegmentOffsets) {
|
||||
// For GIS functions: construct segment offsets array
|
||||
std::vector<int32_t> segment_offsets_array(size);
|
||||
for (int64_t j = 0; j < size; ++j) {
|
||||
segment_offsets_array[j] = static_cast<int32_t>(
|
||||
size_per_chunk_ * i + data_pos + j);
|
||||
}
|
||||
func(data,
|
||||
valid_data,
|
||||
nullptr,
|
||||
segment_offsets_array.data(),
|
||||
size,
|
||||
res + processed_size,
|
||||
valid_res + processed_size,
|
||||
values...);
|
||||
} else {
|
||||
func(data,
|
||||
valid_data,
|
||||
nullptr,
|
||||
@ -676,6 +718,7 @@ class SegmentExpr : public Expr {
|
||||
res + processed_size,
|
||||
valid_res + processed_size,
|
||||
values...);
|
||||
}
|
||||
} else {
|
||||
ApplyValidData(valid_data,
|
||||
res + processed_size,
|
||||
@ -695,7 +738,10 @@ class SegmentExpr : public Expr {
|
||||
}
|
||||
|
||||
// If process_all_chunks is true, all chunks will be processed and no inner state will be changed.
|
||||
template <typename T, typename FUNC, typename... ValTypes>
|
||||
template <typename T,
|
||||
bool NeedSegmentOffsets = false,
|
||||
typename FUNC,
|
||||
typename... ValTypes>
|
||||
int64_t
|
||||
ProcessMultipleChunksCommon(
|
||||
FUNC func,
|
||||
@ -723,7 +769,13 @@ class SegmentExpr : public Expr {
|
||||
|
||||
if (size == 0)
|
||||
continue; //do not go empty-loop at the bound of the chunk
|
||||
|
||||
std::vector<int32_t> segment_offsets_array(size);
|
||||
auto start_offset =
|
||||
segment_->num_rows_until_chunk(field_id_, i) + data_pos;
|
||||
for (int64_t j = 0; j < size; ++j) {
|
||||
int64_t offset = start_offset + j;
|
||||
segment_offsets_array[j] = static_cast<int32_t>(offset);
|
||||
}
|
||||
auto& skip_index = segment_->GetSkipIndex();
|
||||
if (!skip_func || !skip_func(skip_index, field_id_, i)) {
|
||||
bool is_seal = false;
|
||||
@ -737,6 +789,16 @@ class SegmentExpr : public Expr {
|
||||
op_ctx_, field_id_, i, data_pos, size);
|
||||
auto [data_vec, valid_data] = pw.get();
|
||||
|
||||
if constexpr (NeedSegmentOffsets) {
|
||||
func(data_vec.data(),
|
||||
valid_data.data(),
|
||||
nullptr,
|
||||
segment_offsets_array.data(),
|
||||
size,
|
||||
res + processed_size,
|
||||
valid_res + processed_size,
|
||||
values...);
|
||||
} else {
|
||||
func(data_vec.data(),
|
||||
valid_data.data(),
|
||||
nullptr,
|
||||
@ -744,6 +806,8 @@ class SegmentExpr : public Expr {
|
||||
res + processed_size,
|
||||
valid_res + processed_size,
|
||||
values...);
|
||||
}
|
||||
|
||||
is_seal = true;
|
||||
}
|
||||
}
|
||||
@ -755,6 +819,18 @@ class SegmentExpr : public Expr {
|
||||
if (valid_data != nullptr) {
|
||||
valid_data += data_pos;
|
||||
}
|
||||
|
||||
if constexpr (NeedSegmentOffsets) {
|
||||
// For GIS functions: construct segment offsets array
|
||||
func(data,
|
||||
valid_data,
|
||||
nullptr,
|
||||
segment_offsets_array.data(),
|
||||
size,
|
||||
res + processed_size,
|
||||
valid_res + processed_size,
|
||||
values...);
|
||||
} else {
|
||||
func(data,
|
||||
valid_data,
|
||||
nullptr,
|
||||
@ -763,6 +839,7 @@ class SegmentExpr : public Expr {
|
||||
valid_res + processed_size,
|
||||
values...);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const bool* valid_data;
|
||||
if constexpr (std::is_same_v<T, std::string_view> ||
|
||||
@ -801,7 +878,10 @@ class SegmentExpr : public Expr {
|
||||
return processed_size;
|
||||
}
|
||||
|
||||
template <typename T, typename FUNC, typename... ValTypes>
|
||||
template <typename T,
|
||||
bool NeedSegmentOffsets = false,
|
||||
typename FUNC,
|
||||
typename... ValTypes>
|
||||
int64_t
|
||||
ProcessDataChunksForMultipleChunk(
|
||||
FUNC func,
|
||||
@ -809,7 +889,7 @@ class SegmentExpr : public Expr {
|
||||
TargetBitmapView res,
|
||||
TargetBitmapView valid_res,
|
||||
ValTypes... values) {
|
||||
return ProcessMultipleChunksCommon<T>(
|
||||
return ProcessMultipleChunksCommon<T, NeedSegmentOffsets>(
|
||||
func, skip_func, res, valid_res, false, values...);
|
||||
}
|
||||
|
||||
@ -825,7 +905,10 @@ class SegmentExpr : public Expr {
|
||||
func, skip_func, res, valid_res, true, values...);
|
||||
}
|
||||
|
||||
template <typename T, typename FUNC, typename... ValTypes>
|
||||
template <typename T,
|
||||
bool NeedSegmentOffsets = false,
|
||||
typename FUNC,
|
||||
typename... ValTypes>
|
||||
int64_t
|
||||
ProcessDataChunks(
|
||||
FUNC func,
|
||||
@ -834,10 +917,10 @@ class SegmentExpr : public Expr {
|
||||
TargetBitmapView valid_res,
|
||||
ValTypes... values) {
|
||||
if (segment_->is_chunked()) {
|
||||
return ProcessDataChunksForMultipleChunk<T>(
|
||||
return ProcessDataChunksForMultipleChunk<T, NeedSegmentOffsets>(
|
||||
func, skip_func, res, valid_res, values...);
|
||||
} else {
|
||||
return ProcessDataChunksForSingleChunk<T>(
|
||||
return ProcessDataChunksForSingleChunk<T, NeedSegmentOffsets>(
|
||||
func, skip_func, res, valid_res, values...);
|
||||
}
|
||||
}
|
||||
@ -993,6 +1076,9 @@ class SegmentExpr : public Expr {
|
||||
case DataType::VARCHAR: {
|
||||
return ProcessIndexChunksForValid<std::string>();
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
return ProcessIndexChunksForValid<std::string>();
|
||||
}
|
||||
default:
|
||||
ThrowInfo(DataTypeInvalid,
|
||||
"unsupported element type: {}",
|
||||
@ -1056,6 +1142,10 @@ class SegmentExpr : public Expr {
|
||||
return ProcessChunksForValidByOffsets<std::string>(
|
||||
use_index, input);
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
return ProcessChunksForValidByOffsets<std::string>(
|
||||
use_index, input);
|
||||
}
|
||||
default:
|
||||
ThrowInfo(DataTypeInvalid,
|
||||
"unsupported element type: {}",
|
||||
|
||||
@ -28,6 +28,7 @@
|
||||
#include <roaring/roaring.hh>
|
||||
|
||||
#include "common/FieldDataInterface.h"
|
||||
#include "common/Geometry.h"
|
||||
#include "common/Json.h"
|
||||
#include "common/JsonCastType.h"
|
||||
#include "common/Types.h"
|
||||
@ -17140,3 +17141,522 @@ TEST(JsonNonIndexExistsTest, TestExistsExprSealedNoIndex) {
|
||||
EXPECT_TRUE(result == expect_res);
|
||||
}
|
||||
}
|
||||
|
||||
// Smoke test: each GIS filter operator (Intersects/Contains/Crosses/Equals/
// Touches/Overlaps/Within) can be built into a FilterBitsNode and executed
// against a growing segment filled with random data without throwing.
// Only the bitset size is checked — result contents are not asserted
// because the geometry column is randomly generated.
TEST_P(ExprTest, TestGISFunction) {
    using namespace milvus;
    using namespace milvus::query;
    using namespace milvus::segcore;

    // Create schema with geometry field
    auto schema = std::make_shared<Schema>();
    auto int_fid = schema->AddDebugField("int", DataType::INT64);
    auto vec_fid = schema->AddDebugField(
        "fakevec", DataType::VECTOR_FLOAT, 16, knowhere::metric::L2);
    auto geom_fid = schema->AddDebugField("geometry", DataType::GEOMETRY);
    schema->set_primary_field_id(int_fid);

    auto seg = CreateGrowingSegment(schema, empty_index_meta);
    int N = 1000;
    int num_iters = 1;

    // Generate test data
    for (int iter = 0; iter < num_iters; ++iter) {
        auto raw_data = DataGen(schema, N, iter);
        seg->PreInsert(N);
        seg->Insert(iter * N,
                    N,
                    raw_data.row_ids_.data(),
                    raw_data.timestamps_.data(),
                    raw_data.raw_);
    }

    auto seg_promote = dynamic_cast<SegmentGrowingImpl*>(seg.get());

    // Define GIS test cases using struct like JSON tests
    struct GISTestcase {
        std::string wkt_string;
        proto::plan::GISFunctionFilterExpr_GISOp op;
    };

    // One (query geometry, operator) pair per supported GIS op.
    std::vector<GISTestcase> testcases = {
        {"POINT(0 0)", proto::plan::GISFunctionFilterExpr_GISOp_Intersects},
        {"POLYGON((-1 -1, 1 -1, 1 1, -1 1, -1 -1))",
         proto::plan::GISFunctionFilterExpr_GISOp_Contains},
        {"LINESTRING(-2 0, 2 0)",
         proto::plan::GISFunctionFilterExpr_GISOp_Crosses},
        {"POINT(10 10)", proto::plan::GISFunctionFilterExpr_GISOp_Equals},
        {"POLYGON((5 5, 15 5, 15 15, 5 15, 5 5))",
         proto::plan::GISFunctionFilterExpr_GISOp_Touches},
        {"POLYGON((0.5 0.5, 1.5 0.5, 1.5 1.5, 0.5 1.5, 0.5 0.5))",
         proto::plan::GISFunctionFilterExpr_GISOp_Overlaps},
        {"POLYGON((-10 -10, 10 -10, 10 10, -10 10, -10 -10))",
         proto::plan::GISFunctionFilterExpr_GISOp_Within}};

    for (const auto& testcase : testcases) {
        // Create GIS expression
        auto gis_expr = std::make_shared<milvus::expr::GISFunctionFilterExpr>(
            milvus::expr::ColumnInfo(geom_fid, DataType::GEOMETRY),
            testcase.op,
            testcase.wkt_string);

        auto plan = std::make_shared<plan::FilterBitsNode>(DEFAULT_PLANNODE_ID,
                                                           gis_expr);

        // Verify query execution doesn't throw exceptions
        ASSERT_NO_THROW({
            BitsetType final = ExecuteQueryExpr(
                plan, seg_promote, N * num_iters, MAX_TIMESTAMP);

            EXPECT_EQ(final.size(), N * num_iters);

            // Verify result is not empty (at least some geometry data satisfies conditions)
            // NOTE(review): has_true_result is computed but never asserted
            // (see the note below about random data) — consider removing it
            // or turning it into a soft expectation.
            bool has_true_result = false;
            for (int i = 0; i < final.size(); ++i) {
                if (final[i]) {
                    has_true_result = true;
                    break;
                }
            }
            // Note: Since we use random data, all results might be false, which is normal
            // We mainly verify the function execution doesn't crash
        });
    }
}
|
||||
|
||||
// Exercises every GIS filter operator against a SEALED segment (the growing
// path is covered by TestGISFunction). Data is random, so only the bitset
// size is validated — this is a does-not-crash test for the sealed code path.
TEST(ExprTest, SealedSegmentAllOperators) {
    // 1. Build schema with geometry field and primary key
    auto schema = std::make_shared<Schema>();
    auto pk_fid = schema->AddDebugField("pk", DataType::INT64);
    auto geo_fid = schema->AddDebugField("geo", DataType::GEOMETRY);
    schema->set_primary_field_id(pk_fid);

    // 2. Generate random data and load into a sealed segment
    const int64_t N = 1000;
    auto dataset = DataGen(schema, N);
    auto seg = CreateSealedWithFieldDataLoaded(schema, dataset);

    // 3. Prepare (op, wkt) pairs to hit every GIS operator
    std::vector<
        std::pair<proto::plan::GISFunctionFilterExpr_GISOp, std::string>>
        test_cases = {
            {proto::plan::GISFunctionFilterExpr_GISOp_Equals, "POINT(0 0)"},
            {proto::plan::GISFunctionFilterExpr_GISOp_Touches,
             "POLYGON((-1 -1, -1 1, 1 1, 1 -1, -1 -1))"},
            {proto::plan::GISFunctionFilterExpr_GISOp_Overlaps,
             "POLYGON((0 0, 2 0, 2 2, 0 2, 0 0))"},
            {proto::plan::GISFunctionFilterExpr_GISOp_Crosses,
             "LINESTRING(-1 0, 1 0)"},
            {proto::plan::GISFunctionFilterExpr_GISOp_Contains,
             "POLYGON((-2 -2, 2 -2, 2 2, -2 2, -2 -2))"},
            {proto::plan::GISFunctionFilterExpr_GISOp_Intersects, "POINT(1 1)"},
            {proto::plan::GISFunctionFilterExpr_GISOp_Within,
             "POLYGON((-5 -5, 5 -5, 5 5, -5 5, -5 -5))"},
        };

    for (const auto& [op, wkt] : test_cases) {
        // Create expression & plan node
        auto gis_expr = std::make_shared<expr::GISFunctionFilterExpr>(
            expr::ColumnInfo(geo_fid, DataType::GEOMETRY), op, wkt);
        auto plan_node = std::make_shared<plan::FilterBitsNode>(
            DEFAULT_PLANNODE_ID, gis_expr);

        // Execute expression over the sealed segment
        BitsetType result =
            ExecuteQueryExpr(plan_node, seg.get(), N, MAX_TIMESTAMP);

        // Validate basic expectations: bitset size should equal N
        ASSERT_EQ(result.size(), N);
    }
}
|
||||
|
||||
// Correctness test with deterministic geometry data: every row's geometry is
// chosen by i % 4 (point at origin / polygon around origin / far polygon /
// line through origin), so the exact expected bitset for Within, Intersects
// and Equals can be asserted row by row.
TEST_P(ExprTest, TestGISFunctionWithControlledData) {
    using namespace milvus;
    using namespace milvus::query;
    using namespace milvus::segcore;

    // Create schema with geometry field
    auto schema = std::make_shared<Schema>();
    auto int_fid = schema->AddDebugField("int", DataType::INT64);
    auto vec_fid = schema->AddDebugField(
        "fakevec", DataType::VECTOR_FLOAT, 16, knowhere::metric::L2);
    auto geom_fid = schema->AddDebugField("geometry", DataType::GEOMETRY);
    schema->set_primary_field_id(int_fid);

    auto seg = CreateGrowingSegment(schema, empty_index_meta);
    int N = 100;
    int num_iters = 1;

    // Generate controlled test data
    for (int iter = 0; iter < num_iters; ++iter) {
        auto raw_data = DataGen(schema, N, iter);

        // Replace geometry data with controlled test data:
        // locate the geometry column inside the generated protobuf payload
        // and wipe the random values DataGen produced.
        milvus::proto::schema::FieldData* geometry_field_data = nullptr;
        for (auto& fd : *raw_data.raw_->mutable_fields_data()) {
            if (fd.field_id() == geom_fid.get()) {
                geometry_field_data = &fd;
                break;
            }
        }
        assert(geometry_field_data != nullptr);
        geometry_field_data->mutable_scalars()
            ->mutable_geometry_data()
            ->clear_data();

        // Create some controlled geometry data for testing
        auto ctx = GEOS_init_r();
        for (int i = 0; i < N; ++i) {
            const char* wkt = nullptr;

            if (i % 4 == 0) {
                // Create point (0, 0)
                wkt = "POINT (0.0 0.0)";
            } else if (i % 4 == 1) {
                // Create polygon containing (0, 0)
                wkt =
                    "POLYGON ((-1.0 -1.0, 1.0 -1.0, 1.0 1.0, -1.0 1.0, -1.0 "
                    "-1.0))";
            } else if (i % 4 == 2) {
                // Create polygon not containing (0, 0)
                wkt =
                    "POLYGON ((10.0 10.0, 20.0 10.0, 20.0 20.0, 10.0 20.0, "
                    "10.0 10.0))";
            } else {
                // Create line passing through (0, 0)
                wkt = "LINESTRING (-1.0 0.0, 1.0 0.0)";
            }

            // Create Geometry and convert to WKB format
            // (the pipeline stores geometry as WKB, not WKT)
            Geometry geom(ctx, wkt);
            std::string wkb_string = geom.to_wkb_string();

            geometry_field_data->mutable_scalars()
                ->mutable_geometry_data()
                ->add_data(wkb_string);
        }
        GEOS_finish_r(ctx);

        seg->PreInsert(N);
        seg->Insert(iter * N,
                    N,
                    raw_data.row_ids_.data(),
                    raw_data.timestamps_.data(),
                    raw_data.raw_);
    }

    auto seg_promote = dynamic_cast<SegmentGrowingImpl*>(seg.get());

    // Test specific GIS operations: builds the filter expression, runs it,
    // and asserts every row against the expected predicate on its index.
    auto test_gis_operation = [&](const std::string& wkt,
                                  proto::plan::GISFunctionFilterExpr_GISOp op,
                                  std::function<bool(int)> expected_func) {
        // Create GIS expression directly
        auto gis_expr = std::make_shared<milvus::expr::GISFunctionFilterExpr>(
            milvus::expr::ColumnInfo(geom_fid, DataType::GEOMETRY), op, wkt);

        auto plan = std::make_shared<plan::FilterBitsNode>(DEFAULT_PLANNODE_ID,
                                                           gis_expr);

        BitsetType final =
            ExecuteQueryExpr(plan, seg_promote, N * num_iters, MAX_TIMESTAMP);

        EXPECT_EQ(final.size(), N * num_iters);

        // Verify results
        for (int i = 0; i < N * num_iters; ++i) {
            auto ans = final[i];
            auto expected = expected_func(i);
            ASSERT_EQ(ans, expected) << "GIS operation failed at index " << i;
        }
    };

    // Test contains operation
    test_gis_operation("POLYGON((-2 -2, 2 -2, 2 2, -2 2, -2 -2))",
                       proto::plan::GISFunctionFilterExpr_GISOp_Within,
                       [](int i) -> bool {
                           // Only geometry at index 0,1,3 (polygon containing (0,0))
                           return (i % 4 == 0) || (i % 4 == 1) || (i % 4 == 3);
                       });

    // Test intersects operation
    test_gis_operation("POINT(0 0)",
                       proto::plan::GISFunctionFilterExpr_GISOp_Intersects,
                       [](int i) -> bool {
                           // Point at index 0 (0,0), polygon at index 1, line at index 3 should all intersect with point (0,0)
                           return (i % 4 == 0) || (i % 4 == 1) || (i % 4 == 3);
                       });

    // Test equals operation
    test_gis_operation("POINT(0 0)",
                       proto::plan::GISFunctionFilterExpr_GISOp_Equals,
                       [](int i) -> bool {
                           // Only point at index 0 (0,0) should be equal
                           return (i % 4 == 0);
                       });
}
|
||||
|
||||
// Correctness test for the ST_DWITHIN operator: rows carry points at known
// Euclidean distances 0/1/5/10/13 from the origin (chosen by i % 5), so the
// exact match set for each query distance can be asserted.
// NOTE(review): the literal distances (e.g. 55660.0, 111320.0) look like the
// degree distances (0.5, 1.0, ...) scaled by ~111320 m per degree — i.e. the
// filter appears to take meters while the WKT is in degrees. Confirm the
// conversion factor against the DWithin implementation.
TEST_P(ExprTest, TestSTDWithinFunction) {
    using namespace milvus;
    using namespace milvus::query;
    using namespace milvus::segcore;

    // Create schema with geometry field
    auto schema = std::make_shared<Schema>();
    auto int_fid = schema->AddDebugField("int", DataType::INT64);
    auto vec_fid = schema->AddDebugField(
        "fakevec", DataType::VECTOR_FLOAT, 16, knowhere::metric::L2);
    auto geom_fid = schema->AddDebugField("geometry", DataType::GEOMETRY);
    schema->set_primary_field_id(int_fid);

    auto seg = CreateGrowingSegment(schema, empty_index_meta);
    int N = 100;
    int num_iters = 1;

    // Generate controlled test data with known distances
    for (int iter = 0; iter < num_iters; ++iter) {
        auto raw_data = DataGen(schema, N, iter);

        // Replace geometry data with controlled test data for distance testing
        milvus::proto::schema::FieldData* geometry_field_data = nullptr;
        for (auto& fd : *raw_data.raw_->mutable_fields_data()) {
            if (fd.field_id() == geom_fid.get()) {
                geometry_field_data = &fd;
                break;
            }
        }
        assert(geometry_field_data != nullptr);
        geometry_field_data->mutable_scalars()
            ->mutable_geometry_data()
            ->clear_data();

        // Create test points at known distances from origin (0,0)
        auto ctx = GEOS_init_r();
        for (int i = 0; i < N; ++i) {
            const char* wkt = nullptr;

            if (i % 5 == 0) {
                // Distance 0: Point at origin
                wkt = "POINT (0.0 0.0)";
            } else if (i % 5 == 1) {
                // Distance 1: Point at (1,0)
                wkt = "POINT (1.0 0.0)";
            } else if (i % 5 == 2) {
                // Distance 5: Point at (3,4) - Pythagorean triple
                wkt = "POINT (3.0 4.0)";
            } else if (i % 5 == 3) {
                // Distance 10: Point at (6,8)
                wkt = "POINT (6.0 8.0)";
            } else {
                // Distance 13: Point at (5,12)
                wkt = "POINT (5.0 12.0)";
            }

            // Create Geometry and convert to WKB format
            Geometry geom(ctx, wkt);
            std::string wkb_string = geom.to_wkb_string();

            geometry_field_data->mutable_scalars()
                ->mutable_geometry_data()
                ->add_data(wkb_string);
        }
        GEOS_finish_r(ctx);

        seg->PreInsert(N);
        seg->Insert(iter * N,
                    N,
                    raw_data.row_ids_.data(),
                    raw_data.timestamps_.data(),
                    raw_data.raw_);
    }

    auto seg_promote = dynamic_cast<SegmentInternalInterface*>(seg.get());

    // Test ST_DWITHIN operations with different distances:
    // builds a DWithin expression around center_wkt, executes it, and checks
    // each row's bit against the expected predicate on its index.
    auto test_dwithin_operation = [&](const std::string& center_wkt,
                                      double distance,
                                      std::function<bool(int)> expected_func) {
        // Create ST_DWITHIN expression
        auto dwithin_expr =
            std::make_shared<milvus::expr::GISFunctionFilterExpr>(
                milvus::expr::ColumnInfo(geom_fid, DataType::GEOMETRY),
                proto::plan::GISFunctionFilterExpr_GISOp_DWithin,
                center_wkt,
                distance);

        auto plan = std::make_shared<plan::FilterBitsNode>(DEFAULT_PLANNODE_ID,
                                                           dwithin_expr);

        BitsetType final =
            ExecuteQueryExpr(plan, seg_promote, N * num_iters, MAX_TIMESTAMP);

        EXPECT_EQ(final.size(), N * num_iters);

        // Verify results match expectations
        for (int i = 0; i < final.size(); ++i) {
            bool expected = expected_func(i);
            EXPECT_EQ(final[i], expected)
                << "Mismatch at index " << i << " for distance " << distance
                << ": expected " << expected << ", got " << final[i];
        }
    };

    // Test distance 0.5 - only origin point should match
    test_dwithin_operation("POINT(0 0)", 55660.0, [](int i) -> bool {
        return (i % 5 == 0);  // Only points at distance 0
    });

    // Test distance 1.5 - origin and distance-1 points should match
    test_dwithin_operation("POINT(0 0)", 166980.0, [](int i) -> bool {
        return (i % 5 == 0) || (i % 5 == 1);  // Distance 0 and 1
    });

    // Test distance 7.0 - origin, distance-1, and distance-5 points should match
    test_dwithin_operation("POINT(0 0)", 779240.0, [](int i) -> bool {
        return (i % 5 == 0) || (i % 5 == 1) ||
               (i % 5 == 2);  // Distance 0, 1, 5
    });

    // Test distance 12.0 - all but distance-13 points should match
    test_dwithin_operation("POINT(0 0)", 1335840.0, [](int i) -> bool {
        return (i % 5 != 4);  // All except distance 13
    });

    // Test distance 15.0 - all points should match
    test_dwithin_operation("POINT(0 0)", 1669800.0, [](int i) -> bool {
        return true;  // All points
    });

    // Test with different center point
    test_dwithin_operation("POINT(1 0)", 11132.0, [](int i) -> bool {
        return (i % 5 == 1);  // Only the point at (1,0)
    });

    // Test edge cases
    test_dwithin_operation("POINT(0 0)", 111320.0, [](int i) -> bool {
        return (i % 5 == 0) ||
               (i % 5 == 1);  // Distance exactly 1.0 should be included
    });

    test_dwithin_operation("POINT(0 0)", 556600.0, [](int i) -> bool {
        return (i % 5 == 0) || (i % 5 == 1) ||
               (i % 5 == 2);  // Distance exactly 5.0 should be included
    });
}
|
||||
|
||||
// Plan-parsing test: a text-format plan containing gisfunction_filter_expr
// must translate to a binary plan, parse via CreateSearchPlanByExpr, and
// execute a search over a sealed segment without throwing. The field_id
// literals (100 = vec, 101 = geo) match the AddDebugField order below.
TEST_P(ExprTest, ParseGISFunctionFilterExprs) {
    // Build Schema
    auto schema = std::make_shared<Schema>();
    auto dim = 16;
    auto vec_id = schema->AddDebugField(
        "vec", DataType::VECTOR_FLOAT, dim, knowhere::metric::L2);
    auto geo_id = schema->AddDebugField("geo", DataType::GEOMETRY);
    auto pk_id = schema->AddDebugField("pk", DataType::INT64);
    schema->set_primary_field_id(pk_id);

    // Generate data and load
    int64_t N = 1000;
    auto dataset = DataGen(schema, N);
    auto seg = CreateSealedWithFieldDataLoaded(schema, dataset);

    // Test plan with gisfunction_filter_expr
    // (protobuf text format; whitespace inside the raw string is not
    // significant to the parser)
    std::string raw_plan = R"PLAN(vector_anns: <
        field_id: 100
        predicates: <
          gisfunction_filter_expr: <
            column_info: <
              field_id: 101
              data_type: Geometry
            >
            op: Within
            wkt_string: "POLYGON((0 0,1 0,1 1,0 1,0 0))"
          >
        >
        query_info: <
          topk: 5
          metric_type: "L2"
          round_decimal: 3
          search_params: "{\"nprobe\":10}"
        >
        placeholder_tag: "$0"
    >)PLAN";

    // Convert and parse
    auto bin_plan = translate_text_plan_with_metric_type(raw_plan);
    auto plan =
        CreateSearchPlanByExpr(schema, bin_plan.data(), bin_plan.size());

    // If parsing fails, test will fail with exception
    // If parsing succeeds, ParseGISFunctionFilterExprs is covered

    // Execute search to verify execution logic

    auto ph_raw = CreatePlaceholderGroup(5, dim, 123);
    auto ph_grp = ParsePlaceholderGroup(plan.get(), ph_raw.SerializeAsString());
    auto sr = seg->Search(plan.get(), ph_grp.get(), MAX_TIMESTAMP);
}
|
||||
|
||||
// Plan-parsing test across several GIS operators: the operator name and WKT
// are spliced into a text-format plan per iteration, parsed, and executed;
// each search must return results for all 5 placeholder queries.
TEST(ExprTest, ParseGISFunctionFilterExprsMultipleOps) {
    // Build Schema
    auto schema = std::make_shared<Schema>();
    auto dim = 16;
    auto vec_id = schema->AddDebugField(
        "vec", DataType::VECTOR_FLOAT, dim, knowhere::metric::L2);
    auto geo_id = schema->AddDebugField("geo", DataType::GEOMETRY);
    auto pk_id = schema->AddDebugField("pk", DataType::INT64);
    schema->set_primary_field_id(pk_id);

    // Generate data and load
    int64_t N = 1000;
    auto dataset = DataGen(schema, N);
    auto seg = CreateSealedWithFieldDataLoaded(schema, dataset);

    // Test different GIS operations: pairs of (operator enum name, WKT)
    std::vector<std::pair<std::string, std::string>> test_cases = {
        {"Within", "POLYGON((0 0,1 0,1 1,0 1,0 0))"},
        {"Contains", "POINT(0.5 0.5)"},
        {"Intersects", "LINESTRING(0 0,1 1)"},
        {"Equals", "POINT(0 0)"},
        {"Touches", "POLYGON((10 10,11 10,11 11,10 11,10 10))"}};

    for (const auto& test_case : test_cases) {
        const auto& op = test_case.first;
        const auto& wkt = test_case.second;

        // Splice op and wkt into the text-format plan (whitespace inside
        // the raw string is not significant to the protobuf text parser).
        std::string raw_plan = R"(
            vector_anns: <
              field_id: 100
              predicates: <
                gisfunction_filter_expr: <
                  column_info: <
                    field_id: 101
                    data_type: Geometry
                  >
                  op: )" +
                               op + R"(
                  wkt_string: ")" +
                               wkt + R"("
                >
              >
              query_info: <
                topk: 5
                metric_type: "L2"
                round_decimal: 3
                search_params: "{\"nprobe\":10}"
              >
              placeholder_tag: "$0"
            >
        )";

        // Convert and parse
        auto bin_plan = translate_text_plan_to_binary_plan(raw_plan.c_str());
        auto plan =
            CreateSearchPlanByExpr(schema, bin_plan.data(), bin_plan.size());

        // Execute search to verify execution logic
        auto ph_raw = CreatePlaceholderGroup(5, dim, 123);
        auto ph_grp =
            ParsePlaceholderGroup(plan.get(), ph_raw.SerializeAsString());
        auto sr = seg->Search(plan.get(), ph_grp.get(), MAX_TIMESTAMP);
        EXPECT_EQ(sr->total_nq_, 5) << "Failed for operation: " << op;
    }
}
|
||||
|
||||
450
internal/core/src/exec/expression/GISFunctionFilterExpr.cpp
Normal file
450
internal/core/src/exec/expression/GISFunctionFilterExpr.cpp
Normal file
@ -0,0 +1,450 @@
|
||||
// Copyright (C) 2019-2020 Zilliz. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software distributed under the License
|
||||
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
|
||||
// or implied. See the License for the specific language governing permissions and limitations under the License
|
||||
|
||||
#include "GISFunctionFilterExpr.h"
|
||||
#include <cstdlib>
|
||||
#include "common/EasyAssert.h"
|
||||
#include "common/Geometry.h"
|
||||
#include "common/Types.h"
|
||||
#include "pb/plan.pb.h"
|
||||
#include <cmath>
|
||||
#include <fmt/core.h>
|
||||
namespace milvus {
|
||||
namespace exec {
|
||||
|
||||
// Expands into the full evaluation body for a binary geometry predicate
// (e.g. `equals`, `touches`): defines a per-batch lambda that looks each
// row's geometry up in the segment's geometry cache by absolute segment
// offset and applies `method(right_source)` to it, then drives the lambda
// through ProcessDataChunks<_DataType, true> (the `true` requests segment
// offsets) and returns `res_vec`. Expects `res`, `valid_res`, `res_vec`,
// `right_source` and `real_batch_size` to be in scope at the expansion site.
// Note: the `data` and `offsets` lambda parameters are unused — rows are
// resolved through `segment_offsets` and the cache instead. Null rows
// (valid_data[i] == false) are marked invalid and skipped.
#define GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON(_DataType, method)          \
    auto execute_sub_batch = [this](const _DataType* data,                     \
                                    const bool* valid_data,                    \
                                    const int32_t* offsets,                    \
                                    const int32_t* segment_offsets,            \
                                    const int size,                            \
                                    TargetBitmapView res,                      \
                                    TargetBitmapView valid_res,                \
                                    const Geometry& right_source) {            \
        AssertInfo(segment_offsets != nullptr,                                 \
                   "segment_offsets should not be nullptr");                   \
        /* Unified path using simple WKB-content-based cache for both sealed and growing segments. */ \
        auto& geometry_cache =                                                 \
            SimpleGeometryCacheManager::Instance().GetCache(                   \
                this->segment_->get_segment_id(), field_id_);                  \
        auto cache_lock = geometry_cache.AcquireReadLock();                    \
        for (int i = 0; i < size; ++i) {                                       \
            if (valid_data != nullptr && !valid_data[i]) {                     \
                res[i] = valid_res[i] = false;                                 \
                continue;                                                      \
            }                                                                  \
            auto absolute_offset = segment_offsets[i];                         \
            auto cached_geometry =                                             \
                geometry_cache.GetByOffsetUnsafe(absolute_offset);             \
            AssertInfo(cached_geometry != nullptr,                             \
                       "cached geometry is nullptr");                          \
            res[i] = cached_geometry->method(right_source);                    \
        }                                                                      \
    };                                                                         \
    int64_t processed_size = ProcessDataChunks<_DataType, true>(               \
        execute_sub_batch, std::nullptr_t{}, res, valid_res, right_source);    \
    AssertInfo(processed_size == real_batch_size,                              \
               "internal error: expr processed rows {} not equal "             \
               "expect batch size {}",                                         \
               processed_size,                                                 \
               real_batch_size);                                               \
    return res_vec;
|
||||
|
||||
// Specialized macro for distance-based operations (ST_DWITHIN).
// Identical to GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON except that the
// per-row predicate additionally receives the expression's distance, i.e.
// it evaluates cached_geometry->method(right_source, expr_->distance_).
// Like the base macro, the expansion ends with `return res_vec;`, so the
// enclosing switch case returns from the function.
#define GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON_DISTANCE(_DataType, method) \
    auto execute_sub_batch = [this](const _DataType* data,                    \
                                    const bool* valid_data,                   \
                                    const int32_t* offsets,                   \
                                    const int32_t* segment_offsets,           \
                                    const int size,                           \
                                    TargetBitmapView res,                     \
                                    TargetBitmapView valid_res,               \
                                    const Geometry& right_source) {           \
        AssertInfo(segment_offsets != nullptr,                                \
                   "segment_offsets should not be nullptr");                  \
        auto& geometry_cache =                                                \
            SimpleGeometryCacheManager::Instance().GetCache(                  \
                this->segment_->get_segment_id(), field_id_);                 \
        auto cache_lock = geometry_cache.AcquireReadLock();                   \
        for (int i = 0; i < size; ++i) {                                      \
            if (valid_data != nullptr && !valid_data[i]) {                    \
                res[i] = valid_res[i] = false;                                \
                continue;                                                     \
            }                                                                 \
            auto absolute_offset = segment_offsets[i];                        \
            auto cached_geometry =                                            \
                geometry_cache.GetByOffsetUnsafe(absolute_offset);            \
            AssertInfo(cached_geometry != nullptr,                            \
                       "cached geometry is nullptr");                         \
            res[i] = cached_geometry->method(right_source, expr_->distance_); \
        }                                                                     \
    };                                                                        \
    int64_t processed_size = ProcessDataChunks<_DataType, true>(              \
        execute_sub_batch, std::nullptr_t{}, res, valid_res, right_source);   \
    AssertInfo(processed_size == real_batch_size,                             \
               "internal error: expr processed rows {} not equal "            \
               "expect batch size {}",                                        \
               processed_size,                                                \
               real_batch_size);                                              \
    return res_vec;
||||
void
|
||||
PhyGISFunctionFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
|
||||
AssertInfo(expr_->column_.data_type_ == DataType::GEOMETRY,
|
||||
"unsupported data type: {}",
|
||||
expr_->column_.data_type_);
|
||||
if (SegmentExpr::CanUseIndex()) {
|
||||
result = EvalForIndexSegment();
|
||||
} else {
|
||||
result = EvalForDataSegment();
|
||||
}
|
||||
}
|
||||
|
||||
// Brute-force path: evaluate the spatial predicate row-by-row against the
// per-segment geometry cache, without any index. Each switch case below
// expands a macro whose body ends in `return res_vec;`, so despite the
// missing `break`s there is no fall-through — every case returns.
VectorPtr
PhyGISFunctionFilterExpr::EvalForDataSegment() {
    auto real_batch_size = GetNextBatchSize();
    if (real_batch_size == 0) {
        return nullptr;
    }
    // Result bitmap (match flags) plus validity bitmap; all rows start valid.
    auto res_vec = std::make_shared<ColumnVector>(
        TargetBitmap(real_batch_size), TargetBitmap(real_batch_size));
    TargetBitmapView res(res_vec->GetRawData(), real_batch_size);
    TargetBitmapView valid_res(res_vec->GetValidRawData(), real_batch_size);
    valid_res.set();

    // Constant query geometry, parsed once from the expression's WKT.
    auto right_source =
        Geometry(segment_->get_ctx(), expr_->geometry_wkt_.c_str());

    // Choose underlying data type according to segment type to avoid element
    // size mismatch: Sealed segment variable column stores std::string_view;
    // Growing segment stores std::string.
    using SealedType = std::string_view;
    using GrowingType = std::string;

    switch (expr_->op_) {
        case proto::plan::GISFunctionFilterExpr_GISOp_Equals: {
            if (segment_->type() == SegmentType::Sealed) {
                GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON(SealedType, equals);
            } else {
                GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON(GrowingType, equals);
            }
        }
        case proto::plan::GISFunctionFilterExpr_GISOp_Touches: {
            if (segment_->type() == SegmentType::Sealed) {
                GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON(SealedType, touches);
            } else {
                GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON(GrowingType,
                                                           touches);
            }
        }
        case proto::plan::GISFunctionFilterExpr_GISOp_Overlaps: {
            if (segment_->type() == SegmentType::Sealed) {
                GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON(SealedType,
                                                           overlaps);
            } else {
                GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON(GrowingType,
                                                           overlaps);
            }
        }
        case proto::plan::GISFunctionFilterExpr_GISOp_Crosses: {
            if (segment_->type() == SegmentType::Sealed) {
                GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON(SealedType, crosses);
            } else {
                GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON(GrowingType,
                                                           crosses);
            }
        }
        case proto::plan::GISFunctionFilterExpr_GISOp_Contains: {
            if (segment_->type() == SegmentType::Sealed) {
                GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON(SealedType,
                                                           contains);
            } else {
                GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON(GrowingType,
                                                           contains);
            }
        }
        case proto::plan::GISFunctionFilterExpr_GISOp_Intersects: {
            if (segment_->type() == SegmentType::Sealed) {
                GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON(SealedType,
                                                           intersects);
            } else {
                GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON(GrowingType,
                                                           intersects);
            }
        }
        case proto::plan::GISFunctionFilterExpr_GISOp_Within: {
            if (segment_->type() == SegmentType::Sealed) {
                GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON(SealedType, within);
            } else {
                GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON(GrowingType, within);
            }
        }
        case proto::plan::GISFunctionFilterExpr_GISOp_DWithin: {
            // ST_DWITHIN takes an extra distance argument (expr_->distance_).
            if (segment_->type() == SegmentType::Sealed) {
                GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON_DISTANCE(SealedType,
                                                                    dwithin);
            } else {
                GEOMETRY_EXECUTE_SUB_BATCH_WITH_COMPARISON_DISTANCE(GrowingType,
                                                                    dwithin);
            }
        }
        default: {
            ThrowInfo(NotImplemented,
                      "internal error: unknown GIS op : {}",
                      expr_->op_);
        }
    }
    // Unreachable: every case above returns via the macro expansion and the
    // default case throws. Kept to satisfy the compiler's return analysis.
    return res_vec;
}
|
||||
|
||||
// Helper function to calculate bounding box for range_within query optimization
|
||||
// Creates a rectangular bounding box around a query point with given distance in meters
|
||||
static Geometry
|
||||
create_bounding_box_for_dwithin(GEOSContextHandle_t ctx,
|
||||
const Geometry& query_point,
|
||||
double distance_meters) {
|
||||
double query_lon, query_lat;
|
||||
|
||||
AssertInfo(GEOSGeomGetX_r(ctx, query_point.GetGeometry(), &query_lon) == 1,
|
||||
"Failed to get X coordinate from query point");
|
||||
AssertInfo(GEOSGeomGetY_r(ctx, query_point.GetGeometry(), &query_lat) == 1,
|
||||
"Failed to get Y coordinate from query point");
|
||||
|
||||
const double metersPerDegreeLat = 111320.0;
|
||||
|
||||
// Calculate latitude offset (relatively constant)
|
||||
double latOffset = distance_meters / metersPerDegreeLat;
|
||||
|
||||
// Calculate longitude offset (varies with latitude)
|
||||
double latRad = query_lat * M_PI / 180.0;
|
||||
double lonOffset =
|
||||
distance_meters / (metersPerDegreeLat * std::cos(latRad));
|
||||
|
||||
// Calculate bounding box coordinates
|
||||
double minLon = query_lon - lonOffset;
|
||||
double maxLon = query_lon + lonOffset;
|
||||
double minLat = query_lat - latOffset;
|
||||
double maxLat = query_lat + latOffset;
|
||||
|
||||
// Create WKT POLYGON for bounding box
|
||||
std::string bboxWKT = fmt::format(
|
||||
"POLYGON(({:.6f} {:.6f}, {:.6f} {:.6f}, {:.6f} {:.6f}, {:.6f} {:.6f}, "
|
||||
"{:.6f} {:.6f}))",
|
||||
minLon,
|
||||
minLat, // Bottom-left
|
||||
maxLon,
|
||||
minLat, // Bottom-right
|
||||
maxLon,
|
||||
maxLat, // Top-right
|
||||
minLon,
|
||||
maxLat, // Top-left
|
||||
minLon,
|
||||
minLat // Close the ring
|
||||
);
|
||||
|
||||
return Geometry(ctx, bboxWKT.c_str());
|
||||
}
|
||||
|
||||
// Index-backed path: one segment-level R-Tree query produces a coarse
// candidate bitmap (cached in coarse_global_ / coarse_valid_global_), which
// is then refined once with exact GEOS predicates (cached in
// cached_index_chunk_res_). Subsequent batches only slice the cached
// refined bitmap.
VectorPtr
PhyGISFunctionFilterExpr::EvalForIndexSegment() {
    AssertInfo(num_index_chunk_ == 1, "num_index_chunk_ should be 1");
    auto real_batch_size = GetNextBatchSize();
    if (real_batch_size == 0) {
        return nullptr;
    }

    // Constant query geometry parsed from the expression's WKT literal.
    Geometry query_geometry =
        Geometry(segment_->get_ctx(), expr_->geometry_wkt_.c_str());

    /* ------------------------------------------------------------------
     * Prefetch: if coarse results are not cached yet, run a single R-Tree
     * query for all index chunks and cache their coarse bitmaps.
     * ------------------------------------------------------------------*/

    // Exact predicate evaluation used during refinement; mirrors the op
    // dispatch of the brute-force path.
    auto evaluate_geometry = [this](const Geometry& left,
                                    const Geometry& query_geometry) -> bool {
        switch (expr_->op_) {
            case proto::plan::GISFunctionFilterExpr_GISOp_Equals:
                return left.equals(query_geometry);
            case proto::plan::GISFunctionFilterExpr_GISOp_Touches:
                return left.touches(query_geometry);
            case proto::plan::GISFunctionFilterExpr_GISOp_Overlaps:
                return left.overlaps(query_geometry);
            case proto::plan::GISFunctionFilterExpr_GISOp_Crosses:
                return left.crosses(query_geometry);
            case proto::plan::GISFunctionFilterExpr_GISOp_Contains:
                return left.contains(query_geometry);
            case proto::plan::GISFunctionFilterExpr_GISOp_Intersects:
                return left.intersects(query_geometry);
            case proto::plan::GISFunctionFilterExpr_GISOp_Within:
                return left.within(query_geometry);
            case proto::plan::GISFunctionFilterExpr_GISOp_DWithin:
                return left.dwithin(query_geometry, expr_->distance_);
            default:
                ThrowInfo(NotImplemented, "unknown GIS op : {}", expr_->op_);
        }
    };

    TargetBitmap batch_result;
    TargetBitmap batch_valid;
    int processed_rows = 0;

    if (!coarse_cached_) {
        using Index = index::ScalarIndex<std::string>;

        // Prepare shared dataset for index query (coarse candidate set by R-Tree)
        auto ds = std::make_shared<milvus::Dataset>();
        ds->Set(milvus::index::OPERATOR_TYPE, expr_->op_);

        // For range_within operations, use bounding box for coarse filtering
        if (expr_->op_ == proto::plan::GISFunctionFilterExpr_GISOp_DWithin) {
            // Create bounding box geometry for index coarse filtering
            Geometry bbox_geometry = create_bounding_box_for_dwithin(
                segment_->get_ctx(), query_geometry, expr_->distance_);

            ds->Set(milvus::index::MATCH_VALUE, bbox_geometry);

            // Note: Distance is not used for bounding box intersection query
        } else {
            // For other operations, use original geometry
            ds->Set(
                milvus::index::MATCH_VALUE,
                Geometry(segment_->get_ctx(), expr_->geometry_wkt_.c_str()));
        }

        // Query segment-level R-Tree index **once** since each chunk shares the same index
        auto scalar_index = dynamic_cast<const Index*>(pinned_index_[0].get());
        auto* idx_ptr = const_cast<Index*>(scalar_index);

        {
            auto tmp = idx_ptr->Query(ds);
            coarse_global_ = std::move(tmp);
        }
        {
            auto tmp_valid = idx_ptr->IsNotNull();
            coarse_valid_global_ = std::move(tmp_valid);
        }

        coarse_cached_ = true;
    }

    if (cached_index_chunk_res_ == nullptr) {
        // Reuse segment-level coarse cache directly
        auto& coarse = coarse_global_;
        // NOTE(review): chunk_valid is declared but never used below.
        auto& chunk_valid = coarse_valid_global_;
        // Exact refinement with lambda functions for code reuse
        TargetBitmap refined(coarse.size());

        // Lambda: Evaluate geometry operation (shared by both segment types)

        // Lambda: Collect hit offsets from coarse bitmap
        auto collect_hits = [&coarse]() -> std::vector<int64_t> {
            std::vector<int64_t> hit_offsets;
            hit_offsets.reserve(coarse.count());
            for (size_t i = 0; i < coarse.size(); ++i) {
                if (coarse[i]) {
                    hit_offsets.emplace_back(static_cast<int64_t>(i));
                }
            }
            return hit_offsets;
        };

        // Lambda: Process sealed segment data using bulk_subscript with SimpleGeometryCache
        auto process_sealed_data =
            [&](const std::vector<int64_t>& hit_offsets) {
                if (hit_offsets.empty())
                    return;

                // Get simple geometry cache for this segment+field
                auto& geometry_cache =
                    SimpleGeometryCacheManager::Instance().GetCache(
                        segment_->get_segment_id(), field_id_);
                auto cache_lock = geometry_cache.AcquireReadLock();
                for (size_t i = 0; i < hit_offsets.size(); ++i) {
                    const auto pos = hit_offsets[i];

                    auto cached_geometry =
                        geometry_cache.GetByOffsetUnsafe(pos);
                    // skip invalid geometry
                    if (cached_geometry == nullptr) {
                        continue;
                    }
                    bool result =
                        evaluate_geometry(*cached_geometry, query_geometry);

                    if (result) {
                        refined.set(pos);
                    }
                }
            };

        auto hit_offsets = collect_hits();
        process_sealed_data(hit_offsets);

        // Cache refined result for reuse by subsequent batches
        cached_index_chunk_res_ =
            std::make_shared<TargetBitmap>(std::move(refined));
    }

    // Copy this batch's slice of the cached refined/valid bitmaps into the
    // output, advancing the chunk cursors.
    if (segment_->type() == SegmentType::Sealed) {
        auto size = ProcessIndexOneChunk(batch_result,
                                         batch_valid,
                                         0,
                                         *cached_index_chunk_res_,
                                         coarse_valid_global_,
                                         processed_rows);
        processed_rows += size;
        current_index_chunk_pos_ = current_index_chunk_pos_ + size;
    } else {
        // Growing segment: walk data chunks, appending slices until the
        // batch is full.
        for (size_t i = current_data_chunk_; i < num_data_chunk_; i++) {
            auto data_pos =
                (i == current_data_chunk_) ? current_data_chunk_pos_ : 0;
            int64_t size = segment_->chunk_size(field_id_, i) - data_pos;
            size = std::min(size, real_batch_size - processed_rows);

            if (size > 0) {
                batch_result.append(
                    *cached_index_chunk_res_, current_index_chunk_pos_, size);
                batch_valid.append(
                    coarse_valid_global_, current_index_chunk_pos_, size);
            }
            // Update with actual processed size
            processed_rows += size;
            current_index_chunk_pos_ += size;

            if (processed_rows >= real_batch_size) {
                current_data_chunk_ = i;
                current_data_chunk_pos_ = data_pos + size;
                break;
            }
        }
    }

    AssertInfo(processed_rows == real_batch_size,
               "internal error: expr processed rows {} not equal "
               "expect batch size {}",
               processed_rows,
               real_batch_size);
    AssertInfo(batch_result.size() == real_batch_size,
               "internal error: expr processed rows {} not equal "
               "expect batch size {}",
               batch_result.size(),
               real_batch_size);
    AssertInfo(batch_valid.size() == real_batch_size,
               "internal error: expr processed rows {} not equal "
               "expect batch size {}",
               batch_valid.size(),
               real_batch_size);
    return std::make_shared<ColumnVector>(std::move(batch_result),
                                          std::move(batch_valid));
}
|
||||
|
||||
} //namespace exec
|
||||
} // namespace milvus
|
||||
82
internal/core/src/exec/expression/GISFunctionFilterExpr.h
Normal file
82
internal/core/src/exec/expression/GISFunctionFilterExpr.h
Normal file
@ -0,0 +1,82 @@
|
||||
// Copyright (C) 2019-2020 Zilliz. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software distributed under the License
|
||||
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
|
||||
// or implied. See the License for the specific language governing permissions and limitations under the License
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <fmt/core.h>
|
||||
#include <memory>
|
||||
|
||||
#include "common/FieldDataInterface.h"
|
||||
#include "common/Vector.h"
|
||||
#include "exec/expression/Expr.h"
|
||||
#include "expr/ITypeExpr.h"
|
||||
#include "segcore/SegmentInterface.h"
|
||||
#include "common/GeometryCache.h"
|
||||
|
||||
namespace milvus {
|
||||
namespace exec {
|
||||
|
||||
// Physical executor for GISFunctionFilterExpr: filters rows of a GEOMETRY
// column by a spatial predicate against a constant query geometry. At eval
// time it either uses an R-Tree index (coarse query + exact refinement) or
// falls back to brute-force evaluation over raw segment data.
class PhyGISFunctionFilterExpr : public SegmentExpr {
 public:
    PhyGISFunctionFilterExpr(
        const std::vector<std::shared_ptr<Expr>>& input,
        const std::shared_ptr<const milvus::expr::GISFunctionFilterExpr>& expr,
        const std::string& name,
        milvus::OpContext* op_ctx,
        const segcore::SegmentInternalInterface* segment,
        int64_t active_count,
        int64_t batch_size,
        int32_t consistency_level)
        // NOTE(review): std::move on a const lvalue reference degrades to a
        // copy, so `input` is copied here despite the std::move.
        : SegmentExpr(std::move(input),
                      name,
                      op_ctx,
                      segment,
                      expr->column_.field_id_,
                      expr->column_.nested_path_,
                      DataType::GEOMETRY,
                      active_count,
                      batch_size,
                      consistency_level),
          expr_(expr) {
    }

    // Evaluate one batch; dispatches to the index or brute-force path.
    void
    Eval(EvalCtx& context, VectorPtr& result) override;

    // Column this filter applies to.
    std::optional<milvus::expr::ColumnInfo>
    GetColumnInfo() const override {
        return expr_->column_;
    }

 private:
    // Index-backed path: coarse R-Tree query refined by exact predicates.
    VectorPtr
    EvalForIndexSegment();

    // Brute-force path over cached per-row geometries.
    VectorPtr
    EvalForDataSegment();

 private:
    std::shared_ptr<const milvus::expr::GISFunctionFilterExpr> expr_;

    /*
     * Segment-level cache: run a single R-Tree Query for all index chunks to
     * obtain coarse candidate bitmaps. Subsequent batches reuse these cached
     * results to avoid repeated ScalarIndex::Query calls per chunk.
     */
    // whether coarse results have been prefetched once
    bool coarse_cached_ = false;
    // global coarse bitmap (segment-level)
    TargetBitmap coarse_global_;
    // global not-null bitmap (segment-level)
    TargetBitmap coarse_valid_global_;
};
|
||||
} //namespace exec
|
||||
} // namespace milvus
|
||||
@ -75,6 +75,17 @@ PhyNullExpr::Eval(EvalCtx& context, VectorPtr& result) {
|
||||
result = ExecVisitorImpl<ArrayView>(input);
|
||||
break;
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
if (segment_->type() == SegmentType::Growing &&
|
||||
!storage::MmapManager::GetInstance()
|
||||
.GetMmapConfig()
|
||||
.growing_enable_mmap) {
|
||||
result = ExecVisitorImpl<std::string>(input);
|
||||
} else {
|
||||
result = ExecVisitorImpl<std::string_view>(input);
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
ThrowInfo(DataTypeInvalid,
|
||||
"unsupported data type: {}",
|
||||
|
||||
@ -21,6 +21,7 @@
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "common/EasyAssert.h"
|
||||
#include "exec/expression/function/FunctionFactory.h"
|
||||
#include "common/Exception.h"
|
||||
#include "common/Schema.h"
|
||||
@ -803,6 +804,43 @@ class CompareExpr : public ITypeFilterExpr {
|
||||
const proto::plan::OpType op_type_;
|
||||
};
|
||||
|
||||
class GISFunctionFilterExpr : public ITypeFilterExpr {
|
||||
public:
|
||||
GISFunctionFilterExpr(ColumnInfo cloumn,
|
||||
GISFunctionType op,
|
||||
const std::string& geometry_wkt,
|
||||
double distance = 0.0)
|
||||
: column_(cloumn),
|
||||
op_(op),
|
||||
geometry_wkt_(geometry_wkt),
|
||||
distance_(distance){};
|
||||
std::string
|
||||
ToString() const override {
|
||||
if (op_ == proto::plan::GISFunctionFilterExpr_GISOp_DWithin) {
|
||||
return fmt::format(
|
||||
"GISFunctionFilterExpr:[Column: {}, Operator: {} "
|
||||
"WktValue: {}, Distance: {}]",
|
||||
column_.ToString(),
|
||||
GISFunctionFilterExpr_GISOp_Name(op_),
|
||||
geometry_wkt_,
|
||||
distance_);
|
||||
} else {
|
||||
return fmt::format(
|
||||
"GISFunctionFilterExpr:[Column: {}, Operator: {} "
|
||||
"WktValue: {}]",
|
||||
column_.ToString(),
|
||||
GISFunctionFilterExpr_GISOp_Name(op_),
|
||||
geometry_wkt_);
|
||||
}
|
||||
}
|
||||
|
||||
public:
|
||||
const ColumnInfo column_;
|
||||
const GISFunctionType op_;
|
||||
const std::string geometry_wkt_;
|
||||
const double distance_;
|
||||
};
|
||||
|
||||
class JsonContainsExpr : public ITypeFilterExpr {
|
||||
public:
|
||||
JsonContainsExpr(ColumnInfo column,
|
||||
|
||||
@ -37,6 +37,7 @@
|
||||
#include "index/BoolIndex.h"
|
||||
#include "index/InvertedIndexTantivy.h"
|
||||
#include "index/HybridScalarIndex.h"
|
||||
#include "index/RTreeIndex.h"
|
||||
#include "knowhere/comp/knowhere_check.h"
|
||||
#include "log/Log.h"
|
||||
#include "pb/schema.pb.h"
|
||||
@ -309,7 +310,8 @@ IndexFactory::ScalarIndexLoadResource(
|
||||
}
|
||||
request.has_raw_data = true;
|
||||
} else if (index_type == milvus::index::INVERTED_INDEX_TYPE ||
|
||||
index_type == milvus::index::NGRAM_INDEX_TYPE) {
|
||||
index_type == milvus::index::NGRAM_INDEX_TYPE ||
|
||||
index_type == milvus::index::RTREE_INDEX_TYPE) {
|
||||
request.final_memory_cost = 0;
|
||||
request.final_disk_cost = index_size_in_bytes;
|
||||
request.max_memory_cost = index_size_in_bytes;
|
||||
@ -481,6 +483,15 @@ IndexFactory::CreateJsonIndex(
|
||||
}
|
||||
}
|
||||
|
||||
// Create a scalar index for a GEOMETRY field. RTREE is currently the only
// accepted index type; anything else fails the assertion.
IndexBasePtr
IndexFactory::CreateGeometryIndex(
    IndexType index_type,
    const storage::FileManagerContext& file_manager_context) {
    AssertInfo(index_type == RTREE_INDEX_TYPE,
               "Invalid index type for geometry index");
    // The index is specialized on std::string because geometry payloads are
    // carried as string-encoded (WKB) values.
    return std::make_unique<RTreeIndex<std::string>>(file_manager_context);
}
|
||||
|
||||
IndexBasePtr
|
||||
IndexFactory::CreateScalarIndex(
|
||||
const CreateIndexInfo& create_index_info,
|
||||
@ -506,6 +517,10 @@ IndexFactory::CreateScalarIndex(
|
||||
case DataType::JSON: {
|
||||
return CreateJsonIndex(create_index_info, file_manager_context);
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
return CreateGeometryIndex(create_index_info.index_type,
|
||||
file_manager_context);
|
||||
}
|
||||
default:
|
||||
ThrowInfo(DataTypeInvalid, "Invalid data type:{}", data_type);
|
||||
}
|
||||
|
||||
@ -126,6 +126,12 @@ class IndexFactory {
|
||||
const storage::FileManagerContext& file_manager_context =
|
||||
storage::FileManagerContext());
|
||||
|
||||
IndexBasePtr
|
||||
CreateGeometryIndex(
|
||||
IndexType index_type,
|
||||
const storage::FileManagerContext& file_manager_context =
|
||||
storage::FileManagerContext());
|
||||
|
||||
IndexBasePtr
|
||||
CreateScalarIndex(const CreateIndexInfo& create_index_info,
|
||||
const storage::FileManagerContext& file_manager_context =
|
||||
|
||||
@ -46,6 +46,7 @@ constexpr const char* MARISA_TRIE_UPPER = "TRIE";
|
||||
constexpr const char* INVERTED_INDEX_TYPE = "INVERTED";
|
||||
constexpr const char* BITMAP_INDEX_TYPE = "BITMAP";
|
||||
constexpr const char* HYBRID_INDEX_TYPE = "HYBRID";
|
||||
constexpr const char* RTREE_INDEX_TYPE = "RTREE";
|
||||
constexpr const char* SCALAR_INDEX_ENGINE_VERSION =
|
||||
"scalar_index_engine_version";
|
||||
constexpr const char* TANTIVY_INDEX_VERSION = "tantivy_index_version";
|
||||
|
||||
587
internal/core/src/index/RTreeIndex.cpp
Normal file
587
internal/core/src/index/RTreeIndex.cpp
Normal file
@ -0,0 +1,587 @@
|
||||
// Copyright (C) 2019-2020 Zilliz. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software distributed under the License
|
||||
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
|
||||
// or implied. See the License for the specific language governing permissions and limitations under the License
|
||||
|
||||
#include <boost/filesystem.hpp>
|
||||
#include "common/Slice.h" // for INDEX_FILE_SLICE_META and Disassemble
|
||||
#include "common/EasyAssert.h"
|
||||
#include "log/Log.h"
|
||||
#include "storage/LocalChunkManagerSingleton.h"
|
||||
#include "pb/schema.pb.h"
|
||||
#include "index/Utils.h"
|
||||
#include "index/RTreeIndex.h"
|
||||
|
||||
namespace milvus::index {
|
||||
|
||||
constexpr const char* TMP_RTREE_INDEX_PREFIX = "/tmp/milvus/rtree-index/";
|
||||
|
||||
// Return true when `value` ends with `suffix` (an empty suffix always
// matches).
static inline bool
ends_with(const std::string& value, const std::string& suffix) {
    const auto value_len = value.size();
    const auto suffix_len = suffix.size();
    if (suffix_len > value_len) {
        return false;
    }
    return value.rfind(suffix) == value_len - suffix_len;
}
|
||||
|
||||
// Prepare a fresh local scratch directory for building the R-Tree index and
// create the wrapper in build mode. Throws IndexBuildError if a previous
// build's .bgi file already occupies the temp directory.
template <typename T>
void
RTreeIndex<T>::InitForBuildIndex() {
    // NOTE(review): `field` is computed but never used below.
    auto field =
        std::to_string(disk_file_manager_->GetFieldDataMeta().field_id);
    auto prefix = disk_file_manager_->GetIndexIdentifier();
    path_ = std::string(TMP_RTREE_INDEX_PREFIX) + prefix;
    boost::filesystem::create_directories(path_);

    std::string index_file_path = path_ + "/index_file";  // base path (no ext)

    // Refuse to clobber a leftover index file from a previous build.
    if (boost::filesystem::exists(index_file_path + ".bgi")) {
        ThrowInfo(
            IndexBuildError, "build rtree index temp dir:{} not empty", path_);
    }
    // true => build mode.
    wrapper_ = std::make_shared<RTreeIndexWrapper>(index_file_path, true);
}
|
||||
|
||||
// Construct the index shell: file managers are always created; nothing
// build-specific happens here (build setup is deferred to
// InitForBuildIndex, called from Build).
template <typename T>
RTreeIndex<T>::RTreeIndex(const storage::FileManagerContext& ctx)
    : ScalarIndex<T>(RTREE_INDEX_TYPE),
      schema_(ctx.fieldDataMeta.field_schema) {
    mem_file_manager_ = std::make_shared<MemFileManager>(ctx);
    disk_file_manager_ = std::make_shared<DiskFileManager>(ctx);

    // NOTE(review): this early return is currently a no-op — no code follows
    // it. Presumably a placeholder to skip future build-only setup when the
    // index is constructed for loading.
    if (ctx.for_loading_index) {
        return;
    }
}
|
||||
|
||||
// Tear down the index: release the wrapper first so its files are closed,
// then best-effort remove the local scratch/cache directory.
template <typename T>
RTreeIndex<T>::~RTreeIndex() {
    // Free wrapper explicitly to ensure files not being used
    wrapper_.reset();

    // Remove temporary directory if it exists
    if (!path_.empty()) {
        auto local_cm = storage::LocalChunkManagerSingleton::GetInstance()
                            .GetChunkManager();
        if (local_cm) {
            LOG_INFO("rtree index remove path:{}", path_);
            local_cm->RemoveDir(path_);
        }
    }
}
|
||||
|
||||
// Return the path component after the final '/', or the whole path when no
// separator is present.
static std::string
GetFileName(const std::string& path) {
    const auto slash = path.find_last_of('/');
    if (slash == std::string::npos) {
        return path;
    }
    return path.substr(slash + 1);
}
|
||||
|
||||
// Loading existing R-Tree index
// The config must contain "index_files" -> vector<string>
// Remote index objects will be downloaded to local disk via DiskFileManager,
// then RTreeIndexWrapper will load them.
//
// Steps: (1) pull null-offset metadata (sliced or single-file layout) into
// null_offset_ and strip those entries from the file list; (2) normalize
// bare filenames to full remote paths; (3) cache the remaining index files
// to local disk; (4) derive the wrapper's base path (preferring the .bgi
// file, then the .meta.json, then the first local path); (5) load the
// wrapper and finalize counters.
template <typename T>
void
RTreeIndex<T>::Load(milvus::tracer::TraceContext ctx, const Config& config) {
    LOG_DEBUG("Load RTreeIndex with config {}", config.dump());

    auto index_files_opt =
        GetValueFromConfig<std::vector<std::string>>(config, "index_files");
    AssertInfo(index_files_opt.has_value(),
               "index file paths are empty when loading R-Tree index");

    auto files = index_files_opt.value();

    // 1. Extract and load null_offset file(s) if present
    {
        // Locate a file in `files` by its basename.
        auto find_file = [&](const std::string& target) -> auto {
            return std::find_if(
                files.begin(), files.end(), [&](const std::string& filename) {
                    return GetFileName(filename) == target;
                });
        };

        // Copy a raw buffer of size_t offsets into null_offset_ under the
        // write lock.
        auto fill_null_offsets = [&](const uint8_t* data, int64_t size) {
            folly::SharedMutexWritePriority::WriteHolder lock(mutex_);
            null_offset_.resize((size_t)size / sizeof(size_t));
            memcpy(null_offset_.data(), data, (size_t)size);
        };

        auto load_priority =
            GetValueFromConfig<milvus::proto::common::LoadPriority>(
                config, milvus::LOAD_PRIORITY)
                .value_or(milvus::proto::common::LoadPriority::HIGH);

        std::vector<std::string> null_offset_files;
        if (auto it = find_file(INDEX_FILE_SLICE_META); it != files.end()) {
            // sliced case: collect all parts with prefix index_null_offset
            null_offset_files.push_back(*it);
            for (auto& f : files) {
                auto filename = GetFileName(f);
                static const std::string kName = "index_null_offset";
                if (filename.size() >= kName.size() &&
                    filename.substr(0, kName.size()) == kName) {
                    null_offset_files.push_back(f);
                }
            }
            if (!null_offset_files.empty()) {
                auto index_datas = mem_file_manager_->LoadIndexToMemory(
                    null_offset_files, load_priority);
                auto compacted = CompactIndexDatas(index_datas);
                auto codecs = std::move(compacted.at("index_null_offset"));
                for (auto&& codec : codecs.codecs_) {
                    fill_null_offsets(codec->PayloadData(),
                                      codec->PayloadSize());
                }
            }
        } else if (auto it = find_file("index_null_offset");
                   it != files.end()) {
            // single-file case
            null_offset_files.push_back(*it);
            files.erase(it);
            auto index_datas = mem_file_manager_->LoadIndexToMemory(
                {*null_offset_files.begin()}, load_priority);
            auto null_data = std::move(index_datas.at("index_null_offset"));
            fill_null_offsets(null_data->PayloadData(),
                              null_data->PayloadSize());
        }

        // remove loaded null_offset files from files list
        if (!null_offset_files.empty()) {
            files.erase(std::remove_if(
                            files.begin(),
                            files.end(),
                            [&](const std::string& f) {
                                return std::find(null_offset_files.begin(),
                                                 null_offset_files.end(),
                                                 f) != null_offset_files.end();
                            }),
                        files.end());
        }
    }

    // 2. Ensure each file has full remote path. If only filename provided, prepend remote prefix.
    for (auto& f : files) {
        boost::filesystem::path p(f);
        if (!p.has_parent_path()) {
            auto remote_prefix = disk_file_manager_->GetRemoteIndexPrefix();
            f = remote_prefix + "/" + f;
        }
    }

    // 3. Cache remote index files to local disk.
    auto load_priority =
        GetValueFromConfig<milvus::proto::common::LoadPriority>(
            config, milvus::LOAD_PRIORITY)
            .value_or(milvus::proto::common::LoadPriority::HIGH);
    disk_file_manager_->CacheIndexToDisk(files, load_priority);

    // 4. Determine local base path (without extension) for RTreeIndexWrapper.
    auto local_paths = disk_file_manager_->GetLocalFilePaths();
    AssertInfo(!local_paths.empty(),
               "RTreeIndex local files are empty after caching to disk");

    // Pick a .dat or .idx file explicitly; avoid meta or others.
    std::string base_path;
    for (const auto& p : local_paths) {
        if (ends_with(p, ".bgi")) {
            base_path = p.substr(0, p.size() - 4);
            break;
        }
    }
    // Fallback: if not found, try meta json
    if (base_path.empty()) {
        for (const auto& p : local_paths) {
            if (ends_with(p, ".meta.json")) {
                base_path =
                    p.substr(0, p.size() - std::string(".meta.json").size());
                break;
            }
        }
    }
    // Final fallback: use the first path as-is
    if (base_path.empty()) {
        base_path = local_paths.front();
    }
    path_ = base_path;

    // 5. Instantiate wrapper and load.
    wrapper_ =
        std::make_shared<RTreeIndexWrapper>(path_, /*is_build_mode=*/false);
    wrapper_->load();

    // Total rows = indexed (non-null) geometries + recorded null rows.
    total_num_rows_ =
        wrapper_->count() + static_cast<int64_t>(null_offset_.size());
    is_built_ = true;

    LOG_INFO(
        "Loaded R-Tree index from {} with {} rows", path_, total_num_rows_);
}
|
||||
|
||||
// Build the R-Tree index from raw field data referenced by `config`:
// set up the local scratch dir/wrapper, pull the raw WKB column into
// memory, delegate to BuildWithFieldData, then finalize row counters.
template <typename T>
void
RTreeIndex<T>::Build(const Config& config) {
    InitForBuildIndex();

    // load raw WKB data into memory
    auto field_datas = mem_file_manager_->CacheRawDataToMemory(config);
    BuildWithFieldData(field_datas);
    // after build, mark built
    total_num_rows_ =
        wrapper_->count() + static_cast<int64_t>(null_offset_.size());
    is_built_ = true;
}
|
||||
|
||||
// Build the index contents from in-memory field data. For nullable schemas
// a single pass records the global offsets of null rows into null_offset_
// (nulls are excluded from the R-Tree itself); the geometries are then
// handed to the wrapper's bulk loader in one shot.
template <typename T>
void
RTreeIndex<T>::BuildWithFieldData(
    const std::vector<FieldDataPtr>& field_datas) {
    // Default to bulk load for build performance
    // If needed, we can wire a config switch later to disable it.
    // NOTE(review): `use_bulk_load` is always true, so the guarded block is
    // the only code path; the flag exists as a future switch.
    bool use_bulk_load = true;
    if (use_bulk_load) {
        // Single pass: collect null offsets locally and compute total rows
        int64_t total_rows = 0;
        if (schema_.nullable()) {
            std::vector<size_t> local_nulls;
            int64_t global_offset = 0;
            for (const auto& fd : field_datas) {
                const auto n = fd->get_num_rows();
                for (int64_t i = 0; i < n; ++i) {
                    if (!fd->is_valid(i)) {
                        local_nulls.push_back(
                            static_cast<size_t>(global_offset));
                    }
                    ++global_offset;
                }
                total_rows += n;
            }
            // Append collected null offsets under the write lock.
            if (!local_nulls.empty()) {
                folly::SharedMutexWritePriority::WriteHolder lock(mutex_);
                null_offset_.reserve(null_offset_.size() + local_nulls.size());
                null_offset_.insert(
                    null_offset_.end(), local_nulls.begin(), local_nulls.end());
            }
        } else {
            // Non-nullable: just count rows.
            for (const auto& fd : field_datas) {
                total_rows += fd->get_num_rows();
            }
        }
        // bulk load non-null geometries
        wrapper_->bulk_load_from_field_data(field_datas, schema_.nullable());
        total_num_rows_ = total_rows;
        is_built_ = true;
        return;
    }
}
|
||||
|
||||
template <typename T>
|
||||
void
|
||||
RTreeIndex<T>::finish() {
|
||||
if (wrapper_) {
|
||||
LOG_INFO("rtree index finish");
|
||||
wrapper_->finish();
|
||||
}
|
||||
}
|
||||
|
||||
// Persist the built index and report what was written.
//
// Steps: flush the wrapper to disk, register every regular file found under
// path_ with the disk file manager (uploading them remotely), serialize the
// in-memory null-offset table through the memory file manager, and return
// an IndexStats describing all remote files and total bytes.
//
// NOTE(review): path_ is iterated here as a directory, while the load path
// treats it as a file base path — presumably InitForBuildIndex() sets a
// directory-style build path; confirm.
template <typename T>
IndexStatsPtr
RTreeIndex<T>::Upload(const Config& config) {
    // 1. Ensure all buffered data flushed to disk
    finish();

    // 2. Walk temp dir and register files to DiskFileManager
    boost::filesystem::path dir(path_);
    boost::filesystem::directory_iterator end_iter;

    for (boost::filesystem::directory_iterator it(dir); it != end_iter; ++it) {
        if (boost::filesystem::is_directory(*it)) {
            LOG_WARN("{} is a directory, skip", it->path().string());
            continue;
        }

        // AddFile uploads the local file; a failure here aborts the upload.
        AssertInfo(disk_file_manager_->AddFile(it->path().string()),
                   "failed to add index file: {}",
                   it->path().string());
    }

    // 3. Collect remote paths to size mapping
    auto remote_paths_to_size = disk_file_manager_->GetRemotePathsToFileSize();

    // 4. Serialize and register in-memory null_offset if any
    auto binary_set = Serialize(config);
    mem_file_manager_->AddFile(binary_set);
    auto remote_mem_path_to_size =
        mem_file_manager_->GetRemotePathsToFileSize();

    // 5. Assemble IndexStats result
    std::vector<SerializedIndexFileInfo> index_files;
    index_files.reserve(remote_paths_to_size.size() +
                        remote_mem_path_to_size.size());
    for (auto& kv : remote_paths_to_size) {
        index_files.emplace_back(kv.first, kv.second);
    }
    for (auto& kv : remote_mem_path_to_size) {
        index_files.emplace_back(kv.first, kv.second);
    }

    int64_t mem_size = mem_file_manager_->GetAddedTotalMemSize();
    int64_t file_size = disk_file_manager_->GetAddedTotalFileSize();

    return IndexStats::New(mem_size + file_size, std::move(index_files));
}
|
||||
|
||||
template <typename T>
|
||||
BinarySet
|
||||
RTreeIndex<T>::Serialize(const Config& config) {
|
||||
folly::SharedMutexWritePriority::ReadHolder lock(mutex_);
|
||||
auto bytes = null_offset_.size() * sizeof(size_t);
|
||||
BinarySet res_set;
|
||||
if (bytes > 0) {
|
||||
std::shared_ptr<uint8_t[]> buf(new uint8_t[bytes]);
|
||||
std::memcpy(buf.get(), null_offset_.data(), bytes);
|
||||
res_set.Append("index_null_offset", buf, bytes);
|
||||
}
|
||||
milvus::Disassemble(res_set);
|
||||
return res_set;
|
||||
}
|
||||
|
||||
// Loading from a pre-assembled BinarySet is not supported; R-Tree indexes
// are loaded from their on-disk files via Load(TraceContext, Config).
template <typename T>
void
RTreeIndex<T>::Load(const BinarySet& binary_set, const Config& config) {
    ThrowInfo(ErrorCode::NotImplemented,
              "Load(BinarySet) is not yet supported for RTreeIndex");
}
|
||||
|
||||
// Building from an in-memory value array is not supported; geometry indexes
// are built from field data (Build(Config) / BuildWithFieldData).
template <typename T>
void
RTreeIndex<T>::Build(size_t n, const T* values, const bool* valid_data) {
    // Generic Build by value array is not required for RTree at the moment.
    ThrowInfo(ErrorCode::NotImplemented,
              "Build(size_t, values, valid) not supported for RTreeIndex");
}
|
||||
|
||||
// Equality-set membership has no meaning for a spatial index; always throws.
template <typename T>
const TargetBitmap
RTreeIndex<T>::In(size_t n, const T* values) {
    ThrowInfo(ErrorCode::NotImplemented, "In() not supported for RTreeIndex");
    return {};
}
|
||||
|
||||
template <typename T>
|
||||
const TargetBitmap
|
||||
RTreeIndex<T>::IsNull() {
|
||||
int64_t count = Count();
|
||||
TargetBitmap bitset(count);
|
||||
folly::SharedMutexWritePriority::ReadHolder lock(mutex_);
|
||||
auto end = std::lower_bound(
|
||||
null_offset_.begin(), null_offset_.end(), static_cast<size_t>(count));
|
||||
for (auto it = null_offset_.begin(); it != end; ++it) {
|
||||
bitset.set(*it);
|
||||
}
|
||||
return bitset;
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
TargetBitmap
|
||||
RTreeIndex<T>::IsNotNull() {
|
||||
int64_t count = Count();
|
||||
TargetBitmap bitset(count, true);
|
||||
folly::SharedMutexWritePriority::ReadHolder lock(mutex_);
|
||||
auto end = std::lower_bound(
|
||||
null_offset_.begin(), null_offset_.end(), static_cast<size_t>(count));
|
||||
for (auto it = null_offset_.begin(); it != end; ++it) {
|
||||
bitset.reset(*it);
|
||||
}
|
||||
return bitset;
|
||||
}
|
||||
|
||||
// Filtered membership is not applicable to a spatial index; always throws.
template <typename T>
const TargetBitmap
RTreeIndex<T>::InApplyFilter(size_t n,
                             const T* values,
                             const std::function<bool(size_t)>& filter) {
    ThrowInfo(ErrorCode::NotImplemented,
              "InApplyFilter() not supported for RTreeIndex");
    return {};
}
|
||||
|
||||
// Callback-style membership is not applicable to a spatial index; throws.
template <typename T>
void
RTreeIndex<T>::InApplyCallback(size_t n,
                               const T* values,
                               const std::function<void(size_t)>& callback) {
    ThrowInfo(ErrorCode::NotImplemented,
              "InApplyCallback() not supported for RTreeIndex");
}
|
||||
|
||||
// Set exclusion has no meaning for a spatial index; always throws.
template <typename T>
const TargetBitmap
RTreeIndex<T>::NotIn(size_t n, const T* values) {
    ThrowInfo(ErrorCode::NotImplemented,
              "NotIn() not supported for RTreeIndex");
    return {};
}
|
||||
|
||||
// Ordered range comparison has no meaning for geometries; always throws.
template <typename T>
const TargetBitmap
RTreeIndex<T>::Range(T value, OpType op) {
    ThrowInfo(ErrorCode::NotImplemented,
              "Range(value, op) not supported for RTreeIndex");
    return {};
}
|
||||
|
||||
// Bounded range comparison has no meaning for geometries; always throws.
template <typename T>
const TargetBitmap
RTreeIndex<T>::Range(T lower_bound_value,
                     bool lb_inclusive,
                     T upper_bound_value,
                     bool ub_inclusive) {
    ThrowInfo(ErrorCode::NotImplemented,
              "Range(lower, upper) not supported for RTreeIndex");
    return {};
}
|
||||
|
||||
template <typename T>
|
||||
void
|
||||
RTreeIndex<T>::QueryCandidates(proto::plan::GISFunctionFilterExpr_GISOp op,
|
||||
const Geometry query_geometry,
|
||||
std::vector<int64_t>& candidate_offsets) {
|
||||
AssertInfo(wrapper_ != nullptr, "R-Tree index wrapper is null");
|
||||
|
||||
// Create GEOS context and ensure it's properly released
|
||||
GEOSContextHandle_t ctx = GEOS_init_r();
|
||||
|
||||
wrapper_->query_candidates(
|
||||
op, query_geometry.GetGeometry(), ctx, candidate_offsets);
|
||||
GEOS_finish_r(ctx);
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
const TargetBitmap
|
||||
RTreeIndex<T>::Query(const DatasetPtr& dataset) {
|
||||
AssertInfo(schema_.data_type() == proto::schema::DataType::Geometry,
|
||||
"RTreeIndex can only be queried on geometry field");
|
||||
auto op =
|
||||
dataset->Get<proto::plan::GISFunctionFilterExpr_GISOp>(OPERATOR_TYPE);
|
||||
// Query geometry WKB passed via MATCH_VALUE as std::string
|
||||
auto geometry = dataset->Get<Geometry>(MATCH_VALUE);
|
||||
|
||||
// 1) Coarse candidates by R-Tree on MBR
|
||||
std::vector<int64_t> candidate_offsets;
|
||||
QueryCandidates(op, geometry, candidate_offsets);
|
||||
|
||||
// 2) Build initial bitmap from candidates
|
||||
TargetBitmap res(this->Count());
|
||||
for (auto off : candidate_offsets) {
|
||||
if (off >= 0 && off < res.size()) {
|
||||
res.set(off);
|
||||
}
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------
|
||||
// BuildWithRawDataForUT – real implementation for unit-test scenarios
|
||||
// ------------------------------------------------------------------
|
||||
|
||||
template <typename T>
|
||||
void
|
||||
RTreeIndex<T>::BuildWithRawDataForUT(size_t n,
|
||||
const void* values,
|
||||
const Config& config) {
|
||||
// In UT we directly receive an array of std::string (WKB) with length n.
|
||||
const std::string* wkb_array = reinterpret_cast<const std::string*>(values);
|
||||
|
||||
// Guard: n should represent number of strings not raw bytes
|
||||
AssertInfo(n > 0, "BuildWithRawDataForUT expects element count > 0");
|
||||
LOG_WARN("BuildWithRawDataForUT:{}", n);
|
||||
this->InitForBuildIndex();
|
||||
|
||||
int64_t offset = 0;
|
||||
for (size_t i = 0; i < n; ++i) {
|
||||
const auto& wkb = wkb_array[i];
|
||||
const uint8_t* data_ptr = reinterpret_cast<const uint8_t*>(wkb.data());
|
||||
this->wrapper_->add_geometry(data_ptr, wkb.size(), offset++);
|
||||
}
|
||||
this->finish();
|
||||
LOG_WARN("BuildWithRawDataForUT finish");
|
||||
this->total_num_rows_ = offset;
|
||||
LOG_WARN("BuildWithRawDataForUT total_num_rows_:{}", this->total_num_rows_);
|
||||
this->is_built_ = true;
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
void
|
||||
RTreeIndex<T>::BuildWithStrings(const std::vector<std::string>& geometries) {
|
||||
AssertInfo(!geometries.empty(),
|
||||
"BuildWithStrings expects non-empty geometries");
|
||||
LOG_INFO("BuildWithStrings: building RTree index for {} geometries",
|
||||
geometries.size());
|
||||
|
||||
this->InitForBuildIndex();
|
||||
|
||||
int64_t offset = 0;
|
||||
for (const auto& wkb : geometries) {
|
||||
if (!wkb.empty()) {
|
||||
const uint8_t* data_ptr =
|
||||
reinterpret_cast<const uint8_t*>(wkb.data());
|
||||
this->wrapper_->add_geometry(data_ptr, wkb.size(), offset);
|
||||
} else {
|
||||
// Handle null geometry
|
||||
this->null_offset_.push_back(offset);
|
||||
}
|
||||
offset++;
|
||||
}
|
||||
|
||||
this->finish();
|
||||
this->total_num_rows_ = offset;
|
||||
this->is_built_ = true;
|
||||
|
||||
LOG_INFO("BuildWithStrings: completed building RTree index, total_rows: {}",
|
||||
this->total_num_rows_);
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
void
|
||||
RTreeIndex<T>::AddGeometry(const std::string& wkb_data, int64_t row_offset) {
|
||||
if (!wrapper_) {
|
||||
// Initialize if not already done
|
||||
this->InitForBuildIndex();
|
||||
}
|
||||
|
||||
if (!wkb_data.empty()) {
|
||||
const uint8_t* data_ptr =
|
||||
reinterpret_cast<const uint8_t*>(wkb_data.data());
|
||||
wrapper_->add_geometry(data_ptr, wkb_data.size(), row_offset);
|
||||
|
||||
// Update total row count
|
||||
if (row_offset >= total_num_rows_) {
|
||||
total_num_rows_ = row_offset + 1;
|
||||
}
|
||||
|
||||
LOG_DEBUG("Added geometry at row offset {}", row_offset);
|
||||
} else {
|
||||
// Handle null geometry
|
||||
folly::SharedMutexWritePriority::WriteHolder lock(mutex_);
|
||||
null_offset_.push_back(static_cast<size_t>(row_offset));
|
||||
|
||||
// Update total row count
|
||||
if (row_offset >= total_num_rows_) {
|
||||
total_num_rows_ = row_offset + 1;
|
||||
}
|
||||
|
||||
LOG_DEBUG("Added null geometry at row offset {}", row_offset);
|
||||
}
|
||||
}
|
||||
|
||||
// Explicit template instantiation for std::string as we only support string field for now.
|
||||
template class RTreeIndex<std::string>;
|
||||
|
||||
} // namespace milvus::index
|
||||
184
internal/core/src/index/RTreeIndex.h
Normal file
184
internal/core/src/index/RTreeIndex.h
Normal file
@ -0,0 +1,184 @@
|
||||
// Copyright (C) 2019-2020 Zilliz. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software distributed under the License
|
||||
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
|
||||
// or implied. See the License for the specific language governing permissions and limitations under the License
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <cstddef>
|
||||
#include <vector>
|
||||
#include <folly/SharedMutex.h>
|
||||
#include "storage/FileManager.h"
|
||||
#include "storage/DiskFileManagerImpl.h"
|
||||
#include "storage/MemFileManagerImpl.h"
|
||||
#include "index/RTreeIndexWrapper.h"
|
||||
#include "index/ScalarIndex.h"
|
||||
#include "index/Meta.h"
|
||||
#include "pb/plan.pb.h"
|
||||
|
||||
namespace milvus::index {
|
||||
|
||||
using RTreeIndexWrapper = milvus::index::RTreeIndexWrapper;
|
||||
|
||||
// Scalar index over geometry fields backed by a Boost R-Tree of bounding
// boxes (managed via RTreeIndexWrapper). Provides coarse spatial filtering
// (MBR intersection) through Query()/QueryCandidates() plus null-offset
// bookkeeping; generic ScalarIndex operations (In/NotIn/Range/...) are not
// applicable to geometries and throw NotImplemented.
template <typename T>
class RTreeIndex : public ScalarIndex<T> {
 public:
    using MemFileManager = storage::MemFileManagerImpl;
    using MemFileManagerPtr = std::shared_ptr<MemFileManager>;
    using DiskFileManager = storage::DiskFileManagerImpl;
    using DiskFileManagerPtr = std::shared_ptr<DiskFileManager>;

    RTreeIndex() : ScalarIndex<T>(RTREE_INDEX_TYPE) {
    }

    explicit RTreeIndex(
        const storage::FileManagerContext& ctx = storage::FileManagerContext());

    ~RTreeIndex();

    // Prepare a build-mode wrapper (and its local working path) before
    // geometries are added.
    void
    InitForBuildIndex();

    // Load index files from remote storage to local disk and open them.
    void
    Load(milvus::tracer::TraceContext ctx, const Config& config = {}) override;

    // Load index from an already assembled BinarySet (not used by RTree yet)
    void
    Load(const BinarySet& binary_set, const Config& config = {}) override;

    ScalarIndexType
    GetIndexType() const override {
        return ScalarIndexType::RTREE;
    }

    // Build from raw field data referenced by the config.
    void
    Build(const Config& config = {}) override;

    // Build index directly from in-memory value array (required by ScalarIndex)
    void
    Build(size_t n, const T* values, const bool* valid_data = nullptr) override;

    // Total logical rows: the cached value once built; otherwise derived
    // from the wrapper's entry count plus recorded nulls (0 if no wrapper).
    int64_t
    Count() override {
        if (is_built_) {
            return total_num_rows_;
        }
        return wrapper_ ? wrapper_->count() +
                              static_cast<int64_t>(null_offset_.size())
                        : 0;
    }

    // BuildWithRawDataForUT should be only used in ut. Only string is supported.
    void
    BuildWithRawDataForUT(size_t n,
                          const void* values,
                          const Config& config = {}) override;

    // Build index with string data (WKB format) for growing segment
    void
    BuildWithStrings(const std::vector<std::string>& geometries);

    // Add single geometry incrementally (for growing segment)
    void
    AddGeometry(const std::string& wkb_data, int64_t row_offset);

    // Serialize the null-offset table (the tree itself is persisted on disk).
    BinarySet
    Serialize(const Config& config) override;

    // Upload index files (disk + serialized nulls) and report stats.
    IndexStatsPtr
    Upload(const Config& config = {}) override;

    // Not applicable to geometries; throws NotImplemented.
    const TargetBitmap
    In(size_t n, const T* values) override;

    const TargetBitmap
    IsNull() override;

    TargetBitmap
    IsNotNull() override;

    // Not applicable to geometries; throws NotImplemented.
    const TargetBitmap
    InApplyFilter(
        size_t n,
        const T* values,
        const std::function<bool(size_t /* offset */)>& filter) override;

    // Not applicable to geometries; throws NotImplemented.
    void
    InApplyCallback(
        size_t n,
        const T* values,
        const std::function<void(size_t /* offset */)>& callback) override;

    // Not applicable to geometries; throws NotImplemented.
    const TargetBitmap
    NotIn(size_t n, const T* values) override;

    // Not applicable to geometries; throws NotImplemented.
    const TargetBitmap
    Range(T value, OpType op) override;

    // Not applicable to geometries; throws NotImplemented.
    const TargetBitmap
    Range(T lower_bound_value,
          bool lb_inclusive,
          T upper_bound_value,
          bool ub_inclusive) override;

    // The index stores only bounding boxes, never the original WKB.
    const bool
    HasRawData() const override {
        return false;
    }

    std::optional<T>
    Reverse_Lookup(size_t offset) const override {
        ThrowInfo(ErrorCode::NotImplemented,
                  "Reverse_Lookup should not be handled by R-Tree index");
    }

    int64_t
    Size() override {
        return Count();
    }

    // GIS-specific query methods
    /**
     * @brief Query candidates based on spatial operation
     * @param op Spatial operation type
     * @param query_geom Query geometry in WKB format
     * @param candidate_offsets Output vector of candidate row offsets
     */
    void
    QueryCandidates(proto::plan::GISFunctionFilterExpr_GISOp op,
                    const Geometry query_geometry,
                    std::vector<int64_t>& candidate_offsets);

    // Coarse MBR-intersection query; exact filtering is done by the caller.
    const TargetBitmap
    Query(const DatasetPtr& dataset) override;

    void
    BuildWithFieldData(const std::vector<FieldDataPtr>& datas) override;

 protected:
    // Flush buffered tree data to disk via the wrapper.
    void
    finish();

 protected:
    std::shared_ptr<RTreeIndexWrapper> wrapper_;
    // Local base path (no extension) of the on-disk index files.
    std::string path_;
    proto::schema::FieldSchema schema_;

    MemFileManagerPtr mem_file_manager_;
    DiskFileManagerPtr disk_file_manager_;

    // Index state
    bool is_built_ = false;
    int64_t total_num_rows_ = 0;

    // Track null rows to support IsNull/IsNotNull just like other scalar indexes
    folly::SharedMutexWritePriority mutex_{};
    std::vector<size_t> null_offset_;
};
|
||||
} // namespace milvus::index
|
||||
147
internal/core/src/index/RTreeIndexSerialization.h
Normal file
147
internal/core/src/index/RTreeIndexSerialization.h
Normal file
@ -0,0 +1,147 @@
|
||||
// Copyright (C) 2019-2020 Zilliz. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software distributed under the License
|
||||
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
|
||||
// or implied. See the License for the specific language governing permissions and limitations under the License
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <fstream>
|
||||
#include <iostream>
|
||||
#include <sstream>
|
||||
#include <string>
|
||||
|
||||
#include <boost/geometry.hpp>
|
||||
#include <boost/geometry/geometries/box.hpp>
|
||||
#include <boost/geometry/geometries/point.hpp>
|
||||
#include <boost/geometry/index/rtree.hpp>
|
||||
#include <boost/serialization/serialization.hpp>
|
||||
#include <boost/serialization/string.hpp>
|
||||
|
||||
#include <boost/archive/binary_iarchive.hpp>
|
||||
#include <boost/archive/binary_oarchive.hpp>
|
||||
#include <boost/archive/text_iarchive.hpp>
|
||||
#include <boost/archive/text_oarchive.hpp>
|
||||
#include <boost/serialization/nvp.hpp>
|
||||
#include <boost/serialization/split_free.hpp>
|
||||
#include <boost/serialization/utility.hpp>
|
||||
#include <boost/serialization/vector.hpp>
|
||||
|
||||
class RTreeSerializer {
|
||||
public:
|
||||
template <typename RTreeType>
|
||||
static bool
|
||||
saveBinary(const RTreeType& tree, const std::string& filename) {
|
||||
try {
|
||||
std::ofstream ofs(filename, std::ios::binary);
|
||||
if (!ofs.is_open()) {
|
||||
std::cerr << "Cannot open file for writing: " << filename
|
||||
<< std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
boost::archive::binary_oarchive oa(ofs);
|
||||
oa << tree;
|
||||
|
||||
ofs.close();
|
||||
return true;
|
||||
} catch (const std::exception& e) {
|
||||
std::cerr << "Serialization error: " << e.what() << std::endl;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
template <typename RTreeType>
|
||||
static bool
|
||||
loadBinary(RTreeType& tree, const std::string& filename) {
|
||||
try {
|
||||
std::ifstream ifs(filename, std::ios::binary);
|
||||
if (!ifs.is_open()) {
|
||||
std::cerr << "Cannot open file for reading: " << filename
|
||||
<< std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
boost::archive::binary_iarchive ia(ifs);
|
||||
ia >> tree;
|
||||
|
||||
ifs.close();
|
||||
return true;
|
||||
} catch (const std::exception& e) {
|
||||
std::cerr << "Deserialization error: " << e.what() << std::endl;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
template <typename RTreeType>
|
||||
static bool
|
||||
saveText(const RTreeType& tree, const std::string& filename) {
|
||||
try {
|
||||
std::ofstream ofs(filename);
|
||||
if (!ofs.is_open()) {
|
||||
std::cerr << "Cannot open file for writing: " << filename
|
||||
<< std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
boost::archive::text_oarchive oa(ofs);
|
||||
oa << tree;
|
||||
|
||||
ofs.close();
|
||||
return true;
|
||||
} catch (const std::exception& e) {
|
||||
std::cerr << "Serialization error: " << e.what() << std::endl;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
template <typename RTreeType>
|
||||
static bool
|
||||
loadText(RTreeType& tree, const std::string& filename) {
|
||||
try {
|
||||
std::ifstream ifs(filename);
|
||||
if (!ifs.is_open()) {
|
||||
std::cerr << "Cannot open file for reading: " << filename
|
||||
<< std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
boost::archive::text_iarchive ia(ifs);
|
||||
ia >> tree;
|
||||
|
||||
ifs.close();
|
||||
return true;
|
||||
} catch (const std::exception& e) {
|
||||
std::cerr << "Deserialization error: " << e.what() << std::endl;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
template <typename RTreeType>
|
||||
static std::string
|
||||
serializeToString(const RTreeType& tree) {
|
||||
std::ostringstream oss;
|
||||
boost::archive::binary_oarchive oa(oss);
|
||||
oa << tree;
|
||||
return oss.str();
|
||||
}
|
||||
|
||||
template <typename RTreeType>
|
||||
static bool
|
||||
deserializeFromString(RTreeType& tree, const std::string& data) {
|
||||
try {
|
||||
std::istringstream iss(data);
|
||||
boost::archive::binary_iarchive ia(iss);
|
||||
ia >> tree;
|
||||
return true;
|
||||
} catch (const std::exception& e) {
|
||||
std::cerr << "Deserialization error: " << e.what() << std::endl;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
};
|
||||
289
internal/core/src/index/RTreeIndexWrapper.cpp
Normal file
289
internal/core/src/index/RTreeIndexWrapper.cpp
Normal file
@ -0,0 +1,289 @@
|
||||
// Copyright (C) 2019-2020 Zilliz. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software distributed under the License
|
||||
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
|
||||
// or implied. See the License for the specific language governing permissions and limitations under the License
|
||||
|
||||
#include "common/EasyAssert.h"
|
||||
#include "log/Log.h"
|
||||
#include "pb/plan.pb.h"
|
||||
#include <filesystem>
|
||||
#include <fstream>
|
||||
#include <mutex>
|
||||
#include <nlohmann/json.hpp>
|
||||
#include "common/FieldDataInterface.h"
|
||||
#include "RTreeIndexWrapper.h"
|
||||
#include "RTreeIndexSerialization.h"
|
||||
|
||||
namespace milvus::index {
|
||||
|
||||
// Bind the wrapper to `path` (base path without extension). In build mode,
// ensure the parent directory exists and start from an empty tree so
// geometries can be inserted dynamically; in load mode the tree is
// populated later by load().
RTreeIndexWrapper::RTreeIndexWrapper(std::string& path, bool is_build_mode)
    : index_path_(path), is_build_mode_(is_build_mode) {
    if (!is_build_mode_) {
        return;
    }
    auto parent_dir = std::filesystem::path(path).parent_path();
    if (!parent_dir.empty()) {
        std::filesystem::create_directories(parent_dir);
    }
    // Fresh, empty rtree ready for dynamic insertions.
    rtree_ = RTree();
}
|
||||
|
||||
// Defaulted: members (Boost rtree, value vector) release themselves.
RTreeIndexWrapper::~RTreeIndexWrapper() = default;
||||
|
||||
// Insert one WKB-encoded geometry at `row_offset` (build mode only).
// The geometry is parsed with GEOS, its bounding box computed, and the
// (box, offset) pair appended to values_ and inserted into the tree.
//
// NOTE(review): on any GEOS failure (context/reader creation, WKB parse)
// the row is logged and silently skipped — it ends up neither in the tree
// nor in any null bookkeeping, so the wrapper's count() will not include
// it; confirm callers tolerate this.
void
RTreeIndexWrapper::add_geometry(const uint8_t* wkb_data,
                                size_t len,
                                int64_t row_offset) {
    // Acquire write lock to protect rtree_
    std::unique_lock<std::shared_mutex> guard(rtree_mutex_);

    AssertInfo(is_build_mode_, "Cannot add geometry in load mode");

    // Parse WKB data using GEOS for consistency
    GEOSContextHandle_t ctx = GEOS_init_r();
    if (ctx == nullptr) {
        LOG_ERROR("Failed to initialize GEOS context for row {}", row_offset);
        return;
    }

    GEOSWKBReader* reader = GEOSWKBReader_create_r(ctx);
    if (reader == nullptr) {
        GEOS_finish_r(ctx);
        LOG_ERROR("Failed to create GEOS WKB reader for row {}", row_offset);
        return;
    }

    GEOSGeometry* geom = GEOSWKBReader_read_r(ctx, reader, wkb_data, len);
    GEOSWKBReader_destroy_r(ctx, reader);

    if (geom == nullptr) {
        GEOS_finish_r(ctx);
        LOG_ERROR("Failed to parse WKB data for row {}", row_offset);
        return;
    }

    // Get bounding box
    double minX, minY, maxX, maxY;
    get_bounding_box(geom, ctx, minX, minY, maxX, maxY);

    // Create Boost box and insert; values_ mirrors the tree contents for
    // bookkeeping (e.g. the meta file's entry count).
    Box box(Point(minX, minY), Point(maxX, maxY));
    Value val(box, row_offset);
    values_.push_back(val);
    rtree_.insert(val);

    // Clean up
    GEOSGeom_destroy_r(ctx, geom);
    GEOS_finish_r(ctx);
}
|
||||
|
||||
// No IDataStream; bulk-load implemented directly for Boost R-tree
|
||||
|
||||
// Build the Boost R-Tree in one shot from WKB field-data chunks using the
// range (packing) constructor, which is faster than repeated insert().
//
// Rows are skipped (not indexed) when they are null, empty, or fail WKB
// parsing; absolute_offset still advances, so surviving entries keep their
// global row offsets. NOTE(review): parse failures are silently dropped
// here, while add_geometry() logs them — confirm this is intended.
void
RTreeIndexWrapper::bulk_load_from_field_data(
    const std::vector<std::shared_ptr<::milvus::FieldDataBase>>& field_datas,
    bool nullable) {
    // Acquire write lock to protect rtree_ creation and modification
    std::unique_lock<std::shared_mutex> guard(rtree_mutex_);

    AssertInfo(is_build_mode_, "Cannot bulk load in load mode");

    // Initialize GEOS context for bulk operations
    GEOSContextHandle_t ctx = GEOS_init_r();
    if (ctx == nullptr) {
        LOG_ERROR("Failed to initialize GEOS context for bulk load");
        return;
    }

    GEOSWKBReader* reader = GEOSWKBReader_create_r(ctx);
    if (reader == nullptr) {
        GEOS_finish_r(ctx);
        LOG_ERROR("Failed to create GEOS WKB reader for bulk load");
        return;
    }

    std::vector<Value> local_values;
    local_values.reserve(1024);
    int64_t absolute_offset = 0;
    for (const auto& fd : field_datas) {
        const auto n = fd->get_num_rows();
        for (int64_t i = 0; i < n; ++i, ++absolute_offset) {
            // Treat the row as nullable if either the schema or the chunk
            // itself says so; invalid rows are not indexed.
            const bool is_nullable_effective = nullable || fd->IsNullable();
            if (is_nullable_effective && !fd->is_valid(i)) {
                continue;
            }
            const auto* wkb_str =
                static_cast<const std::string*>(fd->RawValue(i));
            if (wkb_str == nullptr || wkb_str->empty()) {
                continue;
            }

            GEOSGeometry* geom = GEOSWKBReader_read_r(
                ctx,
                reader,
                reinterpret_cast<const unsigned char*>(wkb_str->data()),
                wkb_str->size());
            if (geom == nullptr) {
                continue;
            }

            // Only the geometry's MBR is indexed; exact predicates run later.
            double minX, minY, maxX, maxY;
            get_bounding_box(geom, ctx, minX, minY, maxX, maxY);
            GEOSGeom_destroy_r(ctx, geom);

            Box box(Point(minX, minY), Point(maxX, maxY));
            local_values.emplace_back(box, absolute_offset);
        }
    }

    // Clean up GEOS resources
    GEOSWKBReader_destroy_r(ctx, reader);
    GEOS_finish_r(ctx);
    values_.swap(local_values);
    // The range constructor performs the packing bulk load.
    rtree_ = RTree(values_.begin(), values_.end());
    LOG_INFO("R-Tree bulk load (Boost) completed with {} entries",
             values_.size());
}
|
||||
|
||||
void
|
||||
RTreeIndexWrapper::finish() {
|
||||
// Acquire write lock to protect rtree_ modification and cleanup
|
||||
// Guard against repeated invocations which could otherwise attempt to
|
||||
// release resources multiple times (e.g. BuildWithRawDataForUT() calls
|
||||
// finish(), and Upload() may call it again).
|
||||
std::unique_lock<std::shared_mutex> guard(rtree_mutex_);
|
||||
if (finished_) {
|
||||
LOG_DEBUG("RTreeIndexWrapper::finish() called more than once, skip.");
|
||||
return;
|
||||
}
|
||||
|
||||
AssertInfo(is_build_mode_, "Cannot finish in load mode");
|
||||
|
||||
// Persist to disk: write meta and binary data file
|
||||
try {
|
||||
// Write binary rtree data
|
||||
RTreeSerializer::saveBinary(rtree_, index_path_ + ".bgi");
|
||||
|
||||
// Write meta json
|
||||
nlohmann::json meta;
|
||||
meta["dimension"] = dimension_;
|
||||
meta["count"] = static_cast<uint64_t>(values_.size());
|
||||
|
||||
std::ofstream ofs(index_path_ + ".meta.json", std::ios::trunc);
|
||||
if (ofs.fail()) {
|
||||
ThrowInfo(ErrorCode::FileOpenFailed,
|
||||
"Failed to open R-Tree meta file: {}.meta.json",
|
||||
index_path_);
|
||||
}
|
||||
if (!(ofs << meta.dump())) {
|
||||
ThrowInfo(ErrorCode::FileWriteFailed,
|
||||
"Failed to write R-Tree meta file: {}.meta.json",
|
||||
index_path_);
|
||||
}
|
||||
ofs.close();
|
||||
LOG_INFO("R-Tree meta written: {}.meta.json", index_path_);
|
||||
} catch (const std::exception& e) {
|
||||
ThrowInfo(ErrorCode::UnexpectedError,
|
||||
fmt::format("Failed to write R-Tree files: {}", e.what()));
|
||||
}
|
||||
|
||||
finished_ = true;
|
||||
|
||||
LOG_INFO("R-Tree index (Boost) finished building and saved to {}",
|
||||
index_path_);
|
||||
}
|
||||
|
||||
void
|
||||
RTreeIndexWrapper::load() {
|
||||
// Acquire write lock to protect rtree_ initialization during loading
|
||||
std::unique_lock<std::shared_mutex> guard(rtree_mutex_);
|
||||
|
||||
AssertInfo(!is_build_mode_, "Cannot load in build mode");
|
||||
|
||||
try {
|
||||
// Read meta (optional)
|
||||
try {
|
||||
std::ifstream ifs(index_path_ + ".meta.json");
|
||||
if (ifs.good()) {
|
||||
auto meta = nlohmann::json::parse(ifs);
|
||||
// index/leaf capacities are ignored for Boost implementation
|
||||
if (meta.contains("dimension"))
|
||||
dimension_ = meta["dimension"].get<uint32_t>();
|
||||
}
|
||||
} catch (const std::exception& e) {
|
||||
LOG_WARN("Failed to read meta json: {}", e.what());
|
||||
}
|
||||
|
||||
// Read binary data
|
||||
RTreeSerializer::loadBinary(rtree_, index_path_ + ".bgi");
|
||||
|
||||
LOG_INFO("R-Tree index (Boost) loaded from {}", index_path_);
|
||||
} catch (const std::exception& e) {
|
||||
ThrowInfo(ErrorCode::UnexpectedError,
|
||||
fmt::format("Failed to load R-Tree index from {}: {}",
|
||||
index_path_,
|
||||
e.what()));
|
||||
}
|
||||
}
|
||||
|
||||
// Coarse spatial query: collect the offsets of all indexed entries whose
// MBR intersects the MBR of `query_geom`.
//
// Note: `op` is not used for filtering here — every GIS op shares the same
// MBR-intersection prefilter and `op` only appears in the debug log; exact
// predicate evaluation happens downstream.
void
RTreeIndexWrapper::query_candidates(proto::plan::GISFunctionFilterExpr_GISOp op,
                                    const GEOSGeometry* query_geom,
                                    GEOSContextHandle_t ctx,
                                    std::vector<int64_t>& candidate_offsets) {
    candidate_offsets.clear();

    // Get bounding box of query geometry
    double minX, minY, maxX, maxY;
    get_bounding_box(query_geom, ctx, minX, minY, maxX, maxY);

    // Create query box
    Box query_box(Point(minX, minY), Point(maxX, maxY));

    // Perform coarse intersection query under a shared (read) lock so
    // concurrent readers don't block each other.
    std::vector<Value> results;
    {
        std::shared_lock<std::shared_mutex> guard(rtree_mutex_);
        rtree_.query(boost::geometry::index::intersects(query_box),
                     std::back_inserter(results));
    }
    candidate_offsets.reserve(results.size());
    for (const auto& v : results) {
        candidate_offsets.push_back(v.second);
    }

    LOG_DEBUG("R-Tree query returned {} candidates for operation {}",
              candidate_offsets.size(),
              static_cast<int>(op));
}
|
||||
|
||||
void
|
||||
RTreeIndexWrapper::get_bounding_box(const GEOSGeometry* geom,
|
||||
GEOSContextHandle_t ctx,
|
||||
double& minX,
|
||||
double& minY,
|
||||
double& maxX,
|
||||
double& maxY) {
|
||||
AssertInfo(geom != nullptr, "Geometry is null");
|
||||
AssertInfo(ctx != nullptr, "GEOS context is null");
|
||||
|
||||
GEOSGeom_getXMin_r(ctx, geom, &minX);
|
||||
GEOSGeom_getXMax_r(ctx, geom, &maxX);
|
||||
GEOSGeom_getYMin_r(ctx, geom, &minY);
|
||||
GEOSGeom_getYMax_r(ctx, geom, &maxY);
|
||||
}
|
||||
|
||||
int64_t
|
||||
RTreeIndexWrapper::count() const {
|
||||
return static_cast<int64_t>(rtree_.size());
|
||||
}
|
||||
|
||||
// index/leaf capacity setters removed; not applicable for Boost rtree
|
||||
} // namespace milvus::index
|
||||
143
internal/core/src/index/RTreeIndexWrapper.h
Normal file
143
internal/core/src/index/RTreeIndexWrapper.h
Normal file
@ -0,0 +1,143 @@
|
||||
// Copyright (C) 2019-2020 Zilliz. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
// or implied. See the License for the specific language governing permissions and limitations under the License

#pragma once

#include <memory>
#include <shared_mutex>
#include <string>
#include <vector>
#include <boost/geometry.hpp>
#include <boost/geometry/index/rtree.hpp>
#include <geos_c.h>
#include "pb/plan.pb.h"

// Forward declaration to avoid pulling heavy field data headers here
namespace milvus {
class FieldDataBase;
}

namespace milvus::index {

namespace bg = boost::geometry;
namespace bgi = boost::geometry::index;

/**
 * @brief Wrapper class for boost R-Tree functionality
 *
 * This class provides a simplified interface to boost library,
 * handling the creation, management, and querying of R-Tree spatial indexes
 * for geometric data in Milvus. Entries map a 2-D minimum bounding
 * rectangle (MBR) to a row offset; exact geometric predicates are evaluated
 * by callers on the returned candidate set.
 */
class RTreeIndexWrapper {
 public:
    /**
     * @brief Constructor for RTreeIndexWrapper
     * @param path Path for storing index files
     * @param is_build_mode Whether this is for building new index or loading existing one
     *
     * NOTE(review): `path` is taken by non-const reference but only read;
     * a const reference would be preferable — confirm against the .cpp
     * before changing the signature.
     */
    explicit RTreeIndexWrapper(std::string& path, bool is_build_mode);

    /**
     * @brief Destructor
     */
    ~RTreeIndexWrapper();

    /**
     * @brief Insert one geometry, given as WKB bytes, under the row offset.
     * @param wkb_data Pointer to WKB-encoded geometry bytes
     * @param len Length of the WKB buffer in bytes
     * @param row_offset Row offset stored as the entry's payload
     */
    void
    add_geometry(const uint8_t* wkb_data, size_t len, int64_t row_offset);

    /**
     * @brief Bulk load geometries from field data (WKB strings) into a new R-Tree.
     * This API will create the R-Tree via createAndBulkLoadNewRTree internally.
     * @param field_datas Vector of field data blocks containing WKB strings
     * @param nullable Whether the field allows nulls (null rows are skipped but offset still advances)
     */
    void
    bulk_load_from_field_data(
        const std::vector<std::shared_ptr<::milvus::FieldDataBase>>&
            field_datas,
        bool nullable);

    /**
     * @brief Finish building the index and flush to disk
     */
    void
    finish();

    /**
     * @brief Load existing index from disk
     */
    void
    load();

    /**
     * @brief Query candidates based on spatial operation
     * @param op Spatial operation type
     * @param query_geom Query geometry
     * @param ctx GEOS context handle used to compute the query MBR
     * @param candidate_offsets Output vector of candidate row offsets
     *        (coarse, MBR-intersection based; may contain false positives)
     */
    void
    query_candidates(proto::plan::GISFunctionFilterExpr_GISOp op,
                     const GEOSGeometry* query_geom,
                     GEOSContextHandle_t ctx,
                     std::vector<int64_t>& candidate_offsets);

    /**
     * @brief Get the total number of geometries in the index
     * @return Number of geometries
     */
    int64_t
    count() const;

    // Boost rtree does not use index/leaf capacities; keep only fill factor for
    // compatibility (no-op currently)

 private:
    /**
     * @brief Get bounding box from GEOS geometry
     * @param geom Input geometry
     * @param ctx GEOS context handle
     * @param minX Output minimum X coordinate
     * @param minY Output minimum Y coordinate
     * @param maxX Output maximum X coordinate
     * @param maxY Output maximum Y coordinate
     */
    void
    get_bounding_box(const GEOSGeometry* geom,
                     GEOSContextHandle_t ctx,
                     double& minX,
                     double& minY,
                     double& maxX,
                     double& maxY);

 private:
    // Boost.Geometry types and in-memory structures
    using Point = bg::model::point<double, 2, bg::cs::cartesian>;
    using Box = bg::model::box<Point>;
    using Value = std::pair<Box, int64_t>;  // (MBR, row_offset)
    using RTree = bgi::rtree<Value, bgi::rstar<16>>;

    // The in-memory tree; guarded by rtree_mutex_.
    RTree rtree_{};
    // Staging buffer of (MBR, offset) pairs used during building.
    std::vector<Value> values_;
    // Base path; serialized files are written as <path>.bgi / <path>.meta.json.
    std::string index_path_;
    // True when constructed for building a new index, false for loading.
    bool is_build_mode_;

    // Flag to guard against repeated invocations which could otherwise attempt to release resources multiple times (e.g. BuildWithRawDataForUT() calls finish(), and Upload() may call it again).
    bool finished_ = false;

    // Serialize access to rtree_ (mutable so const readers like count() can lock).
    mutable std::shared_mutex rtree_mutex_;

    // R-Tree parameters
    uint32_t dimension_ = 2;
};

} // namespace milvus::index
|
||||
@ -37,6 +37,7 @@ enum class ScalarIndexType {
|
||||
INVERTED,
|
||||
HYBRID,
|
||||
JSONSTATS,
|
||||
RTREE,
|
||||
};
|
||||
|
||||
inline std::string
|
||||
@ -54,6 +55,8 @@ ToString(ScalarIndexType type) {
|
||||
return "INVERTED";
|
||||
case ScalarIndexType::HYBRID:
|
||||
return "HYBRID";
|
||||
case ScalarIndexType::RTREE:
|
||||
return "RTREE";
|
||||
default:
|
||||
return "UNKNOWN";
|
||||
}
|
||||
|
||||
@ -62,6 +62,7 @@ class IndexFactory {
|
||||
case DataType::STRING:
|
||||
case DataType::ARRAY:
|
||||
case DataType::JSON:
|
||||
case DataType::GEOMETRY:
|
||||
case DataType::TIMESTAMPTZ:
|
||||
return CreateScalarIndex(type, config, context);
|
||||
|
||||
|
||||
@ -54,6 +54,7 @@ TEST_F(ChunkVectorTest, FillDataWithMmap) {
|
||||
schema->AddDebugField("timestamptz", DataType::TIMESTAMPTZ);
|
||||
auto varchar_field = schema->AddDebugField("varchar", DataType::VARCHAR);
|
||||
auto json_field = schema->AddDebugField("json", DataType::JSON);
|
||||
auto geometry_field = schema->AddDebugField("geometry", DataType::GEOMETRY);
|
||||
auto int_array_field =
|
||||
schema->AddDebugField("int_array", DataType::ARRAY, DataType::INT8);
|
||||
auto long_array_field =
|
||||
@ -123,6 +124,8 @@ TEST_F(ChunkVectorTest, FillDataWithMmap) {
|
||||
nullptr, varchar_field, ids_ds->GetIds(), num_inserted);
|
||||
auto json_result = segment->bulk_subscript(
|
||||
nullptr, json_field, ids_ds->GetIds(), num_inserted);
|
||||
auto geometry_result = segment->bulk_subscript(
|
||||
nullptr, geometry_field, ids_ds->GetIds(), num_inserted);
|
||||
auto int_array_result = segment->bulk_subscript(
|
||||
nullptr, int_array_field, ids_ds->GetIds(), num_inserted);
|
||||
auto long_array_result = segment->bulk_subscript(
|
||||
@ -161,6 +164,8 @@ TEST_F(ChunkVectorTest, FillDataWithMmap) {
|
||||
EXPECT_EQ(varchar_result->scalars().string_data().data_size(),
|
||||
num_inserted);
|
||||
EXPECT_EQ(json_result->scalars().json_data().data_size(), num_inserted);
|
||||
EXPECT_EQ(geometry_result->scalars().geometry_data().data_size(),
|
||||
num_inserted);
|
||||
EXPECT_EQ(fp32_vec_result->vectors().float_vector().data_size(),
|
||||
num_inserted * dim);
|
||||
EXPECT_EQ(fp16_vec_result->vectors().float16_vector().size(),
|
||||
|
||||
@ -212,7 +212,7 @@ class ChunkedColumnInterface {
|
||||
IsChunkedVariableColumnDataType(DataType data_type) {
|
||||
return data_type == DataType::STRING ||
|
||||
data_type == DataType::VARCHAR || data_type == DataType::TEXT ||
|
||||
data_type == DataType::JSON;
|
||||
data_type == DataType::JSON || data_type == DataType::GEOMETRY;
|
||||
}
|
||||
|
||||
static bool
|
||||
|
||||
@ -14,9 +14,11 @@
|
||||
#include <google/protobuf/text_format.h>
|
||||
|
||||
#include <cstdint>
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "common/Geometry.h"
|
||||
#include "common/VectorTrait.h"
|
||||
#include "common/EasyAssert.h"
|
||||
#include "exec/expression/function/FunctionFactory.h"
|
||||
@ -543,6 +545,19 @@ ProtoParser::ParseValueExprs(const proto::plan::ValueExpr& expr_pb) {
|
||||
return std::make_shared<expr::ValueExpr>(expr_pb.value());
|
||||
}
|
||||
|
||||
// Build an executable GIS filter expression (e.g. ST_EQUALS/ST_WITHIN) from
// its plan protobuf. The filter carries the target column, the GIS operator,
// the query geometry as a WKT string, and a distance (used by
// distance-based operators; presumably ignored by the others — verify
// against GISFunctionFilterExpr's evaluation).
expr::TypedExprPtr
ProtoParser::ParseGISFunctionFilterExprs(
    const proto::plan::GISFunctionFilterExpr& expr_pb) {
    auto& columnInfo = expr_pb.column_info();
    auto field_id = FieldId(columnInfo.field_id());
    auto data_type = schema->operator[](field_id).get_data_type();
    // Sanity check: the data type declared in the plan must match the
    // schema's type for this field.
    Assert(data_type == (DataType)columnInfo.data_type());

    auto expr = std::make_shared<expr::GISFunctionFilterExpr>(
        columnInfo, expr_pb.op(), expr_pb.wkt_string(), expr_pb.distance());
    return expr;
}
|
||||
|
||||
expr::TypedExprPtr
|
||||
ProtoParser::CreateAlwaysTrueExprs() {
|
||||
return std::make_shared<expr::AlwaysTrueExpr>();
|
||||
@ -612,6 +627,11 @@ ProtoParser::ParseExprs(const proto::plan::Expr& expr_pb,
|
||||
result = ParseNullExprs(expr_pb.null_expr());
|
||||
break;
|
||||
}
|
||||
case ppe::kGisfunctionFilterExpr: {
|
||||
result =
|
||||
ParseGISFunctionFilterExprs(expr_pb.gisfunction_filter_expr());
|
||||
break;
|
||||
}
|
||||
case ppe::kTimestamptzArithCompareExpr: {
|
||||
result = ParseTimestamptzArithCompareExprs(
|
||||
expr_pb.timestamptz_arith_compare_expr());
|
||||
|
||||
@ -89,6 +89,10 @@ class ProtoParser {
|
||||
expr::TypedExprPtr
|
||||
ParseJsonContainsExprs(const proto::plan::JSONContainsExpr& expr_pb);
|
||||
|
||||
expr::TypedExprPtr
|
||||
ParseGISFunctionFilterExprs(
|
||||
const proto::plan::GISFunctionFilterExpr& expr_pb);
|
||||
|
||||
expr::TypedExprPtr
|
||||
ParseTermExprs(const proto::plan::TermExpr& expr_pb);
|
||||
|
||||
|
||||
@ -1434,6 +1434,15 @@ ChunkedSegmentSealedImpl::ChunkedSegmentSealedImpl(
|
||||
}
|
||||
|
||||
ChunkedSegmentSealedImpl::~ChunkedSegmentSealedImpl() {
|
||||
// Clean up geometry cache for all fields in this segment
|
||||
auto& cache_manager = milvus::exec::SimpleGeometryCacheManager::Instance();
|
||||
cache_manager.RemoveSegmentCaches(ctx_, get_segment_id());
|
||||
|
||||
if (ctx_) {
|
||||
GEOS_finish_r(ctx_);
|
||||
ctx_ = nullptr;
|
||||
}
|
||||
|
||||
if (mmap_descriptor_ != nullptr) {
|
||||
auto mm = storage::MmapManager::GetInstance().GetMmapChunkManager();
|
||||
mm->UnRegister(mmap_descriptor_);
|
||||
@ -1737,6 +1746,17 @@ ChunkedSegmentSealedImpl::get_raw_data(milvus::OpContext* op_ctx,
|
||||
break;
|
||||
}
|
||||
|
||||
case DataType::GEOMETRY: {
|
||||
bulk_subscript_ptr_impl<std::string>(op_ctx,
|
||||
column.get(),
|
||||
seg_offsets,
|
||||
count,
|
||||
ret->mutable_scalars()
|
||||
->mutable_geometry_data()
|
||||
->mutable_data());
|
||||
break;
|
||||
}
|
||||
|
||||
case DataType::ARRAY: {
|
||||
bulk_subscript_array_impl(
|
||||
op_ctx,
|
||||
@ -2465,6 +2485,10 @@ ChunkedSegmentSealedImpl::load_field_data_common(
|
||||
column->ManualEvictCache();
|
||||
}
|
||||
}
|
||||
if (data_type == DataType::GEOMETRY) {
|
||||
// Construct GeometryCache for the entire field
|
||||
LoadGeometryCache(field_id, column);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
@ -2556,6 +2580,11 @@ ChunkedSegmentSealedImpl::fill_empty_field(const FieldMeta& field_meta) {
|
||||
std::move(translator), field_meta);
|
||||
break;
|
||||
}
|
||||
case milvus::DataType::GEOMETRY: {
|
||||
column = std::make_shared<ChunkedVariableColumn<std::string>>(
|
||||
std::move(translator), field_meta);
|
||||
break;
|
||||
}
|
||||
case milvus::DataType::ARRAY: {
|
||||
column = std::make_shared<ChunkedArrayColumn>(std::move(translator),
|
||||
field_meta);
|
||||
@ -2583,4 +2612,50 @@ ChunkedSegmentSealedImpl::fill_empty_field(const FieldMeta& field_meta) {
|
||||
id_);
|
||||
}
|
||||
|
||||
// Populate the process-wide geometry cache for one GEOMETRY field of this
// sealed segment. Every row's WKB payload is appended to the cache in row
// order; null rows are appended as empty entries so that cache positions
// stay aligned with segment offsets. Any failure is escalated as
// UnexpectedError.
void
ChunkedSegmentSealedImpl::LoadGeometryCache(
    FieldId field_id, const std::shared_ptr<ChunkedColumnInterface>& column) {
    try {
        // Get (or create) the geometry cache keyed by segment+field.
        auto& geometry_cache =
            milvus::exec::SimpleGeometryCacheManager::Instance().GetCache(
                get_segment_id(), field_id);

        // Iterate through all chunks and collect WKB data.
        auto num_chunks = column->num_chunks();
        for (int64_t chunk_id = 0; chunk_id < num_chunks; ++chunk_id) {
            // Get all string views (WKB blobs) from this chunk.
            auto pw = column->StringViews(nullptr, chunk_id);
            auto [string_views, valid_data] = pw.get();

            // Add each row to the geometry cache in order. An empty
            // valid_data bitmap means the field is non-nullable, i.e. every
            // row is valid.
            for (size_t i = 0; i < string_views.size(); ++i) {
                if (valid_data.empty() || valid_data[i]) {
                    // Valid geometry data.
                    const auto& wkb_data = string_views[i];
                    geometry_cache.AppendData(
                        ctx_, wkb_data.data(), wkb_data.size());
                } else {
                    // Null/invalid geometry: keep the slot so offsets align.
                    geometry_cache.AppendData(ctx_, nullptr, 0);
                }
            }
        }

        LOG_INFO(
            "Successfully loaded geometry cache for segment {} field {} with "
            "{} geometries",
            get_segment_id(),
            field_id.get(),
            geometry_cache.Size());

    } catch (const std::exception& e) {
        ThrowInfo(UnexpectedError,
                  "Failed to load geometry cache for segment {} field {}: {}",
                  get_segment_id(),
                  field_id.get(),
                  e.what());
    }
}
|
||||
|
||||
} // namespace milvus::segcore
|
||||
|
||||
@ -375,6 +375,11 @@ class ChunkedSegmentSealedImpl : public SegmentSealed {
|
||||
return insert_record_.timestamps_;
|
||||
}
|
||||
|
||||
// Load Geometry cache for a field
|
||||
void
|
||||
LoadGeometryCache(FieldId field_id,
|
||||
const std::shared_ptr<ChunkedColumnInterface>& column);
|
||||
|
||||
private:
|
||||
void
|
||||
load_system_field_internal(FieldId field_id, FieldDataInfo& data);
|
||||
|
||||
@ -115,6 +115,18 @@ VectorBase::set_data_raw(ssize_t element_offset,
|
||||
|
||||
return set_data_raw(element_offset, data_raw.data(), element_count);
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
// get the geometry array of a column from proto message
|
||||
auto& geometry_data = FIELD_DATA(data, geometry);
|
||||
std::vector<std::string> data_raw{};
|
||||
data_raw.reserve(geometry_data.size());
|
||||
for (auto& geometry_bytes : geometry_data) {
|
||||
//this geometry_bytes consider as wkt strings from milvus-proto
|
||||
data_raw.emplace_back(
|
||||
std::string(geometry_bytes.data(), geometry_bytes.size()));
|
||||
}
|
||||
return set_data_raw(element_offset, data_raw.data(), element_count);
|
||||
}
|
||||
case DataType::ARRAY: {
|
||||
auto& array_data = FIELD_DATA(data, array);
|
||||
std::vector<Array> data_raw{};
|
||||
|
||||
@ -16,11 +16,15 @@
|
||||
#include "common/Types.h"
|
||||
#include "fmt/format.h"
|
||||
#include "index/ScalarIndexSort.h"
|
||||
#include "index/StringIndexMarisa.h"
|
||||
|
||||
#include "common/SystemProperty.h"
|
||||
#include "segcore/FieldIndexing.h"
|
||||
#include "index/VectorMemIndex.h"
|
||||
#include "IndexConfigGenerator.h"
|
||||
#include "index/RTreeIndex.h"
|
||||
#include "storage/FileManager.h"
|
||||
#include "storage/LocalChunkManagerSingleton.h"
|
||||
|
||||
namespace milvus::segcore {
|
||||
using std::unique_ptr;
|
||||
@ -326,6 +330,229 @@ VectorFieldIndexing::has_raw_data() const {
|
||||
return index_->HasRawData();
|
||||
}
|
||||
|
||||
// Construct a scalar-field indexing handle with an explicit index meta.
// This overload exists for fields (currently geometry) whose growing index
// needs configuration beyond the plain FieldMeta. The index itself is
// created eagerly by recreate_index(); built_/sync_with_index_ start false
// until data is appended.
// NOTE(review): segment_max_row_count is accepted for signature parity with
// CreateIndex() but is not used in this constructor body.
template <typename T>
ScalarFieldIndexing<T>::ScalarFieldIndexing(
    const FieldMeta& field_meta,
    const FieldIndexMeta& field_index_meta,
    int64_t segment_max_row_count,
    const SegcoreConfig& segcore_config,
    const VectorBase* field_raw_data)
    : FieldIndexing(field_meta, segcore_config),
      built_(false),
      sync_with_index_(false),
      config_(std::make_unique<FieldIndexMeta>(field_index_meta)) {
    recreate_index(field_meta, field_raw_data);
}
|
||||
|
||||
// (Re)create the underlying scalar index for this field and reset the
// building state. GEOMETRY fields (stored as std::string WKB) get an R-Tree
// index backed by the local chunk manager; other string fields get a Marisa
// trie index; all remaining scalar types get a sorted index.
// @param field_meta     Schema metadata of the field being indexed.
// @param field_raw_data Raw column data; currently unused here — TODO confirm
//                       whether it is intended for a future bulk-build path.
template <typename T>
void
ScalarFieldIndexing<T>::recreate_index(const FieldMeta& field_meta,
                                       const VectorBase* field_raw_data) {
    if constexpr (std::is_same_v<T, std::string>) {
        if (field_meta.get_data_type() == DataType::GEOMETRY) {
            // Create chunk manager for file operations.
            auto chunk_manager =
                milvus::storage::LocalChunkManagerSingleton::GetInstance()
                    .GetChunkManager();

            // Create FieldDataMeta for RTree index.
            storage::FieldDataMeta field_data_meta;
            field_data_meta.field_id = field_meta.get_id().get();

            // Create a minimal field schema from FieldMeta.
            field_data_meta.field_schema.set_fieldid(field_meta.get_id().get());
            field_data_meta.field_schema.set_name(field_meta.get_name().get());
            field_data_meta.field_schema.set_data_type(
                static_cast<proto::schema::DataType>(
                    field_meta.get_data_type()));
            field_data_meta.field_schema.set_nullable(field_meta.is_nullable());

            // Create IndexMeta for RTree index. The segment/build ids are
            // placeholders for a growing-segment interim index.
            storage::IndexMeta index_meta;
            index_meta.segment_id = 0;
            index_meta.field_id = field_meta.get_id().get();
            index_meta.build_id = 0;
            index_meta.index_version = 1;
            index_meta.key = "rtree_index";
            index_meta.field_name = field_meta.get_name().get();
            index_meta.field_type = field_meta.get_data_type();
            index_meta.index_non_encoding = false;

            // Create FileManagerContext with all required components.
            storage::FileManagerContext ctx(
                field_data_meta, index_meta, chunk_manager);

            index_ = std::make_unique<index::RTreeIndex<std::string>>(ctx);
            // Reset building state; the index is (re)built incrementally as
            // data arrives.
            built_ = false;
            sync_with_index_ = false;
            index_cur_ = 0;
            LOG_INFO(
                "Created R-Tree index for geometry data type: {} with "
                "FileManagerContext",
                field_meta.get_data_type());
            return;
        }
        // Non-geometry strings use a Marisa trie index.
        index_ = index::CreateStringIndexMarisa();
    } else {
        // All other scalar types use a sorted index.
        index_ = index::CreateScalarIndexSort<T>();
    }

    built_ = false;
    sync_with_index_ = false;
    index_cur_ = 0;

    LOG_INFO("Created scalar index for data type: {}",
             field_meta.get_data_type());
}
|
||||
|
||||
// Append a streamed DataArray batch to the growing index. Only GEOMETRY
// fields (T == std::string) are supported: their WKB rows are forwarded to
// the R-Tree via process_geometry_data(). A geometry batch without a
// geometry_data payload is a silent no-op. Every other scalar type throws
// Unsupported.
// @param reserved_offset First global row offset reserved for this batch.
// @param size            Number of rows in the batch.
// @param vec_base        Raw column storage (forwarded, unused here).
// @param stream_data     Protobuf batch carrying the geometry payload.
template <typename T>
void
ScalarFieldIndexing<T>::AppendSegmentIndex(int64_t reserved_offset,
                                           int64_t size,
                                           const VectorBase* vec_base,
                                           const DataArray* stream_data) {
    // Special handling for geometry fields (stored as std::string).
    if constexpr (std::is_same_v<T, std::string>) {
        if (get_data_type() == DataType::GEOMETRY) {
            // Extract geometry data from stream_data.
            if (stream_data->has_scalars() &&
                stream_data->scalars().has_geometry_data()) {
                const auto& geometry_array =
                    stream_data->scalars().geometry_data();
                const auto& valid_data = stream_data->valid_data();

                // Accessor returning (WKB string, validity) for row i; an
                // empty valid_data bitmap means all rows are valid.
                auto accessor = [&geometry_array, &valid_data](
                                    int64_t i) -> std::pair<std::string, bool> {
                    bool is_valid = valid_data.empty() || valid_data[i];
                    if (is_valid && i < geometry_array.data_size()) {
                        return {geometry_array.data(i), true};
                    }
                    return {"", false};
                };

                process_geometry_data(
                    reserved_offset, size, vec_base, accessor, "DataArray");
            }
            return;
        }
    }

    // For other scalar fields, not implemented yet.
    ThrowInfo(Unsupported,
              "ScalarFieldIndexing::AppendSegmentIndex from DataArray not "
              "implemented for non-geometry scalar fields. Type: {}",
              get_data_type());
}
|
||||
|
||||
// FieldDataPtr counterpart of AppendSegmentIndex(DataArray): append a batch
// of in-memory field data to the growing index. Only GEOMETRY fields
// (T == std::string) are supported; rows are read as std::string WKB and
// forwarded to process_geometry_data(). A null Data() pointer is a silent
// no-op. Every other scalar type throws Unsupported.
template <typename T>
void
ScalarFieldIndexing<T>::AppendSegmentIndex(int64_t reserved_offset,
                                           int64_t size,
                                           const VectorBase* vec_base,
                                           const FieldDataPtr& field_data) {
    // Special handling for geometry fields (stored as std::string).
    if constexpr (std::is_same_v<T, std::string>) {
        if (get_data_type() == DataType::GEOMETRY) {
            // Extract geometry data from field_data. The raw buffer is an
            // array of std::string holding WKB bytes.
            const void* raw_data = field_data->Data();
            if (raw_data) {
                const auto* string_array =
                    static_cast<const std::string*>(raw_data);

                // Accessor returning (WKB string, validity) for row i.
                // field_data is captured by value to keep the buffer alive.
                auto accessor = [field_data, string_array](
                                    int64_t i) -> std::pair<std::string, bool> {
                    bool is_valid = field_data->is_valid(i);
                    if (is_valid) {
                        return {string_array[i], true};
                    }
                    return {"", false};
                };

                process_geometry_data(
                    reserved_offset, size, vec_base, accessor, "FieldData");
            }
            return;
        }
    }

    // For other scalar fields, not implemented yet.
    ThrowInfo(Unsupported,
              "ScalarFieldIndexing::AppendSegmentIndex from FieldDataPtr not "
              "implemented for non-geometry scalar fields. Type: {}",
              get_data_type());
}
|
||||
|
||||
// Shared worker for both AppendSegmentIndex overloads: feed `size` geometry
// rows starting at `reserved_offset` into the R-Tree index. The accessor
// abstracts over the data source (DataArray vs FieldDataPtr) and returns
// (WKB string, is_valid) for each local row index; invalid (null) rows are
// skipped but still consume an offset. On first use the R-Tree is switched
// into incremental build mode — there is no bulk-build threshold.
// @param log_source Human-readable tag of the data source for log lines.
template <typename T>
template <typename GeometryDataAccessor>
void
ScalarFieldIndexing<T>::process_geometry_data(int64_t reserved_offset,
                                              int64_t size,
                                              const VectorBase* vec_base,
                                              GeometryDataAccessor&& accessor,
                                              const std::string& log_source) {
    // Special handling for geometry fields (stored as std::string).
    if constexpr (std::is_same_v<T, std::string>) {
        if (get_data_type() == DataType::GEOMETRY) {
            // Cast to R-Tree index for geometry data; anything else here is
            // a wiring bug (recreate_index should have created an RTreeIndex).
            auto* rtree_index =
                dynamic_cast<index::RTreeIndex<std::string>*>(index_.get());
            if (!rtree_index) {
                ThrowInfo(UnexpectedError,
                          "Failed to cast to R-Tree index for geometry field");
            }

            // Initialize R-Tree index on first data arrival (no threshold waiting).
            if (!built_) {
                try {
                    // Initialize R-Tree for building immediately when first data arrives.
                    rtree_index->InitForBuildIndex();
                    built_ = true;
                    sync_with_index_ = true;
                    LOG_INFO(
                        "Initialized R-Tree index for immediate incremental "
                        "building from {}",
                        log_source);
                } catch (std::exception& error) {
                    ThrowInfo(UnexpectedError,
                              "R-Tree index initialization error: {}",
                              error.what());
                }
            }

            // Always add geometries incrementally (no bulk build phase).
            int64_t added_count = 0;
            for (int64_t i = 0; i < size; ++i) {
                int64_t global_offset = reserved_offset + i;

                // Use the accessor to get geometry data and validity.
                auto [wkb_data, is_valid] = accessor(i);

                if (is_valid) {
                    try {
                        rtree_index->AddGeometry(wkb_data, global_offset);
                        added_count++;
                    } catch (std::exception& error) {
                        ThrowInfo(UnexpectedError,
                                  "Failed to add geometry at offset {}: {}",
                                  global_offset,
                                  error.what());
                    }
                }
            }

            // Update statistics; the index stays searchable after each batch.
            index_cur_.fetch_add(added_count);
            sync_with_index_.store(true);

            LOG_INFO("Added {} geometries to R-Tree index immediately from {}",
                     added_count,
                     log_source);
        }
    }
}
|
||||
|
||||
std::unique_ptr<FieldIndexing>
|
||||
CreateIndex(const FieldMeta& field_meta,
|
||||
const FieldIndexMeta& field_index_meta,
|
||||
@ -377,6 +604,13 @@ CreateIndex(const FieldMeta& field_meta,
|
||||
case DataType::VARCHAR:
|
||||
return std::make_unique<ScalarFieldIndexing<std::string>>(
|
||||
field_meta, segcore_config);
|
||||
case DataType::GEOMETRY:
|
||||
return std::make_unique<ScalarFieldIndexing<std::string>>(
|
||||
field_meta,
|
||||
field_index_meta,
|
||||
segment_max_row_count,
|
||||
segcore_config,
|
||||
field_raw_data);
|
||||
default:
|
||||
ThrowInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported scalar type in index: {}",
|
||||
@ -384,4 +618,7 @@ CreateIndex(const FieldMeta& field_meta,
|
||||
}
|
||||
}
|
||||
|
||||
// Explicit template instantiation for ScalarFieldIndexing
|
||||
template class ScalarFieldIndexing<std::string>;
|
||||
|
||||
} // namespace milvus::segcore
|
||||
|
||||
@ -68,6 +68,20 @@ class FieldIndexing {
|
||||
const VectorBase* vec_base,
|
||||
const void* data_source) = 0;
|
||||
|
||||
// For scalar fields (including geometry), append data incrementally
|
||||
virtual void
|
||||
AppendSegmentIndex(int64_t reserved_offset,
|
||||
int64_t size,
|
||||
const VectorBase* vec_base,
|
||||
const DataArray* stream_data) = 0;
|
||||
|
||||
// For scalar fields (including geometry), append data incrementally (FieldDataPtr version)
|
||||
virtual void
|
||||
AppendSegmentIndex(int64_t reserved_offset,
|
||||
int64_t size,
|
||||
const VectorBase* vec_base,
|
||||
const FieldDataPtr& field_data) = 0;
|
||||
|
||||
virtual void
|
||||
GetDataFromIndex(const int64_t* seg_offsets,
|
||||
int64_t count,
|
||||
@ -118,6 +132,12 @@ class ScalarFieldIndexing : public FieldIndexing {
|
||||
public:
|
||||
using FieldIndexing::FieldIndexing;
|
||||
|
||||
explicit ScalarFieldIndexing(const FieldMeta& field_meta,
|
||||
const FieldIndexMeta& field_index_meta,
|
||||
int64_t segment_max_row_count,
|
||||
const SegcoreConfig& segcore_config,
|
||||
const VectorBase* field_raw_data);
|
||||
|
||||
void
|
||||
AppendSegmentIndexDense(int64_t reserved_offset,
|
||||
int64_t size,
|
||||
@ -137,6 +157,18 @@ class ScalarFieldIndexing : public FieldIndexing {
|
||||
"scalar index doesn't support append vector segment index");
|
||||
}
|
||||
|
||||
void
|
||||
AppendSegmentIndex(int64_t reserved_offset,
|
||||
int64_t size,
|
||||
const VectorBase* vec_base,
|
||||
const DataArray* stream_data) override;
|
||||
|
||||
void
|
||||
AppendSegmentIndex(int64_t reserved_offset,
|
||||
int64_t size,
|
||||
const VectorBase* vec_base,
|
||||
const FieldDataPtr& field_data) override;
|
||||
|
||||
void
|
||||
GetDataFromIndex(const int64_t* seg_offsets,
|
||||
int64_t count,
|
||||
@ -146,6 +178,11 @@ class ScalarFieldIndexing : public FieldIndexing {
|
||||
"scalar index don't support get data from index");
|
||||
}
|
||||
|
||||
bool
|
||||
has_raw_data() const override {
|
||||
return index_->HasRawData();
|
||||
}
|
||||
|
||||
int64_t
|
||||
get_build_threshold() const override {
|
||||
return 0;
|
||||
@ -153,6 +190,20 @@ class ScalarFieldIndexing : public FieldIndexing {
|
||||
|
||||
bool
|
||||
sync_data_with_index() const override {
|
||||
// For geometry fields, check if index is built and synchronized
|
||||
if constexpr (std::is_same_v<T, std::string>) {
|
||||
if (data_type_ == DataType::GEOMETRY) {
|
||||
bool is_built = built_.load();
|
||||
bool is_synced = sync_with_index_.load();
|
||||
LOG_DEBUG(
|
||||
"ScalarFieldIndexing::sync_data_with_index for geometry "
|
||||
"field: built={}, synced={}",
|
||||
is_built,
|
||||
is_synced);
|
||||
return is_built && is_synced;
|
||||
}
|
||||
}
|
||||
// For other scalar fields, not supported yet
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -165,10 +216,44 @@ class ScalarFieldIndexing : public FieldIndexing {
|
||||
|
||||
PinWrapper<index::IndexBase*>
|
||||
get_segment_indexing() const override {
|
||||
// For geometry fields, return the single index
|
||||
if constexpr (std::is_same_v<T, std::string>) {
|
||||
if (data_type_ == DataType::GEOMETRY) {
|
||||
return index_.get();
|
||||
}
|
||||
}
|
||||
// For other scalar fields, not supported yet
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
private:
|
||||
void
|
||||
recreate_index(const FieldMeta& field_meta,
|
||||
const VectorBase* field_raw_data);
|
||||
|
||||
// Helper function to process geometry data and add to R-Tree index
|
||||
template <typename GeometryDataAccessor>
|
||||
void
|
||||
process_geometry_data(int64_t reserved_offset,
|
||||
int64_t size,
|
||||
const VectorBase* vec_base,
|
||||
GeometryDataAccessor&& accessor,
|
||||
const std::string& log_source);
|
||||
|
||||
// current number of rows in index.
|
||||
std::atomic<idx_t> index_cur_ = 0;
|
||||
// whether the growing index has been built.
|
||||
std::atomic<bool> built_ = false;
|
||||
// whether all inserted data has been added to growing index and can be searched.
|
||||
std::atomic<bool> sync_with_index_ = false;
|
||||
|
||||
// Configuration for scalar index building
|
||||
std::unique_ptr<FieldIndexMeta> config_;
|
||||
|
||||
// Single scalar index for incremental indexing (new approach)
|
||||
std::unique_ptr<index::ScalarIndex<T>> index_;
|
||||
|
||||
// Chunk-based indexes for compatibility (old approach)
|
||||
tbb::concurrent_vector<index::ScalarIndexPtr<T>> data_;
|
||||
};
|
||||
|
||||
@ -195,6 +280,24 @@ class VectorFieldIndexing : public FieldIndexing {
|
||||
const VectorBase* field_raw_data,
|
||||
const void* data_source) override;
|
||||
|
||||
void
|
||||
AppendSegmentIndex(int64_t reserved_offset,
|
||||
int64_t size,
|
||||
const VectorBase* vec_base,
|
||||
const DataArray* stream_data) override {
|
||||
ThrowInfo(Unsupported,
|
||||
"vector index should use AppendSegmentIndexDense/Sparse");
|
||||
}
|
||||
|
||||
void
|
||||
AppendSegmentIndex(int64_t reserved_offset,
|
||||
int64_t size,
|
||||
const VectorBase* vec_base,
|
||||
const FieldDataPtr& field_data) override {
|
||||
ThrowInfo(Unsupported,
|
||||
"vector index should use AppendSegmentIndexDense/Sparse");
|
||||
}
|
||||
|
||||
// for sparse float vector:
|
||||
// * element_size is not used
|
||||
// * output_raw pooints at a milvus::schema::proto::SparseFloatArray.
|
||||
@ -306,6 +409,26 @@ class IndexingRecord {
|
||||
field_raw_data));
|
||||
}
|
||||
}
|
||||
} else if (field_meta.get_data_type() == DataType::GEOMETRY) {
|
||||
if (index_meta_ == nullptr) {
|
||||
LOG_INFO("miss index meta for growing interim index");
|
||||
continue;
|
||||
}
|
||||
|
||||
if (index_meta_->GetIndexMaxRowCount() > 0 &&
|
||||
index_meta_->HasField(field_id)) {
|
||||
auto geo_field_meta =
|
||||
index_meta_->GetFieldIndexMeta(field_id);
|
||||
auto field_raw_data =
|
||||
insert_record->get_data_base(field_id);
|
||||
field_indexings_.try_emplace(
|
||||
field_id,
|
||||
CreateIndex(field_meta,
|
||||
geo_field_meta,
|
||||
index_meta_->GetIndexMaxRowCount(),
|
||||
segcore_config_,
|
||||
field_raw_data));
|
||||
}
|
||||
}
|
||||
}
|
||||
assert(offset_id == schema_.size());
|
||||
@ -354,6 +477,10 @@ class IndexingRecord {
|
||||
stream_data->vectors().sparse_float_vector().dim(),
|
||||
field_raw_data,
|
||||
data.get());
|
||||
} else if (type == DataType::GEOMETRY) {
|
||||
// For geometry fields, append data incrementally to RTree index
|
||||
indexing->AppendSegmentIndex(
|
||||
reserved_offset, size, field_raw_data, stream_data);
|
||||
}
|
||||
}
|
||||
|
||||
@ -388,6 +515,10 @@ class IndexingRecord {
|
||||
->Dim(),
|
||||
vec_base,
|
||||
p);
|
||||
} else if (type == DataType::GEOMETRY) {
|
||||
// For geometry fields, append data incrementally to RTree index
|
||||
auto vec_base = record.get_data_base(fieldId);
|
||||
indexing->AppendSegmentIndex(reserved_offset, size, vec_base, data);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -901,6 +901,11 @@ class InsertRecordGrowing {
|
||||
field_id, size_per_chunk, scalar_mmap_descriptor);
|
||||
return;
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
this->append_data<std::string>(
|
||||
field_id, size_per_chunk, scalar_mmap_descriptor);
|
||||
return;
|
||||
}
|
||||
default: {
|
||||
ThrowInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported scalar type",
|
||||
|
||||
@ -149,6 +149,21 @@ AssembleGroupByValues(
|
||||
}
|
||||
break;
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
mutable_group_by_field_value->set_type(
|
||||
milvus::proto::schema::DataType::Geometry);
|
||||
auto field_data = group_by_res_values->mutable_geometry_data();
|
||||
for (std::size_t idx = 0; idx < group_by_val_size; idx++) {
|
||||
if (group_by_vals[idx].has_value()) {
|
||||
std::string val =
|
||||
std::get<std::string>(group_by_vals[idx].value());
|
||||
*(field_data->mutable_data()->Add()) = val;
|
||||
} else {
|
||||
valid_data->Set(idx, false);
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
case DataType::JSON: {
|
||||
auto json_path = plan->plan_node_->search_info_.json_path_;
|
||||
auto json_type = plan->plan_node_->search_info_.json_type_;
|
||||
|
||||
@ -225,6 +225,14 @@ SegmentGrowingImpl::Insert(int64_t reserved_offset,
|
||||
field_id, num_rows, field_data_size);
|
||||
}
|
||||
|
||||
// Build geometry cache for GEOMETRY fields
|
||||
if (field_meta.get_data_type() == DataType::GEOMETRY) {
|
||||
BuildGeometryCacheForInsert(
|
||||
field_id,
|
||||
&insert_record_proto->fields_data(data_offset),
|
||||
num_rows);
|
||||
}
|
||||
|
||||
stats_.mem_size += field_data_size;
|
||||
|
||||
try_remove_chunks(field_id);
|
||||
@ -516,6 +524,11 @@ SegmentGrowingImpl::load_column_group_data_internal(
|
||||
field_data,
|
||||
primary_field_id,
|
||||
num_rows);
|
||||
// Build geometry cache for GEOMETRY fields
|
||||
if (schema_->operator[](field_id).get_data_type() ==
|
||||
DataType::GEOMETRY) {
|
||||
BuildGeometryCacheForLoad(field_id, field_data);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -931,6 +944,16 @@ SegmentGrowingImpl::bulk_subscript(milvus::OpContext* op_ctx,
|
||||
result->mutable_scalars()->mutable_json_data()->mutable_data());
|
||||
break;
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
bulk_subscript_ptr_impl<std::string>(op_ctx,
|
||||
vec_ptr,
|
||||
seg_offsets,
|
||||
count,
|
||||
result->mutable_scalars()
|
||||
->mutable_geometry_data()
|
||||
->mutable_data());
|
||||
break;
|
||||
}
|
||||
case DataType::ARRAY: {
|
||||
// element
|
||||
bulk_subscript_array_impl(op_ctx,
|
||||
@ -1274,7 +1297,8 @@ void
|
||||
SegmentGrowingImpl::LazyCheckSchema(SchemaPtr sch) {
|
||||
if (sch->get_schema_version() > schema_->get_schema_version()) {
|
||||
LOG_INFO(
|
||||
"lazy check schema segment {} found newer schema version, current "
|
||||
"lazy check schema segment {} found newer schema version, "
|
||||
"current "
|
||||
"schema version {}, new schema version {}",
|
||||
id_,
|
||||
schema_->get_schema_version(),
|
||||
@ -1342,4 +1366,99 @@ SegmentGrowingImpl::fill_empty_field(const FieldMeta& field_meta) {
|
||||
id_);
|
||||
}
|
||||
|
||||
void
|
||||
SegmentGrowingImpl::BuildGeometryCacheForInsert(FieldId field_id,
|
||||
const DataArray* data_array,
|
||||
int64_t num_rows) {
|
||||
try {
|
||||
// Get geometry cache for this segment+field
|
||||
auto& geometry_cache =
|
||||
milvus::exec::SimpleGeometryCacheManager::Instance().GetCache(
|
||||
get_segment_id(), field_id);
|
||||
|
||||
// Process geometry data from DataArray
|
||||
const auto& geometry_data = data_array->scalars().geometry_data();
|
||||
const auto& valid_data = data_array->valid_data();
|
||||
|
||||
for (int64_t i = 0; i < num_rows; ++i) {
|
||||
if (valid_data.empty() ||
|
||||
(i < valid_data.size() && valid_data[i])) {
|
||||
// Valid geometry data
|
||||
const auto& wkb_data = geometry_data.data(i);
|
||||
geometry_cache.AppendData(
|
||||
ctx_, wkb_data.data(), wkb_data.size());
|
||||
} else {
|
||||
// Null/invalid geometry
|
||||
geometry_cache.AppendData(ctx_, nullptr, 0);
|
||||
}
|
||||
}
|
||||
|
||||
LOG_INFO(
|
||||
"Successfully appended {} geometries to cache for growing "
|
||||
"segment "
|
||||
"{} field {}",
|
||||
num_rows,
|
||||
get_segment_id(),
|
||||
field_id.get());
|
||||
|
||||
} catch (const std::exception& e) {
|
||||
ThrowInfo(UnexpectedError,
|
||||
"Failed to build geometry cache for growing segment {} field "
|
||||
"{} insert: {}",
|
||||
get_segment_id(),
|
||||
field_id.get(),
|
||||
e.what());
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
SegmentGrowingImpl::BuildGeometryCacheForLoad(
|
||||
FieldId field_id, const std::vector<FieldDataPtr>& field_data) {
|
||||
try {
|
||||
// Get geometry cache for this segment+field
|
||||
auto& geometry_cache =
|
||||
milvus::exec::SimpleGeometryCacheManager::Instance().GetCache(
|
||||
get_segment_id(), field_id);
|
||||
|
||||
// Process each field data chunk
|
||||
for (const auto& data : field_data) {
|
||||
auto num_rows = data->get_num_rows();
|
||||
|
||||
for (int64_t i = 0; i < num_rows; ++i) {
|
||||
if (data->is_valid(i)) {
|
||||
// Valid geometry data
|
||||
auto wkb_data =
|
||||
static_cast<const std::string*>(data->RawValue(i));
|
||||
geometry_cache.AppendData(
|
||||
ctx_, wkb_data->data(), wkb_data->size());
|
||||
} else {
|
||||
// Null/invalid geometry
|
||||
geometry_cache.AppendData(ctx_, nullptr, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
size_t total_rows = 0;
|
||||
for (const auto& data : field_data) {
|
||||
total_rows += data->get_num_rows();
|
||||
}
|
||||
|
||||
LOG_INFO(
|
||||
"Successfully loaded {} geometries to cache for growing "
|
||||
"segment {} "
|
||||
"field {}",
|
||||
total_rows,
|
||||
get_segment_id(),
|
||||
field_id.get());
|
||||
|
||||
} catch (const std::exception& e) {
|
||||
ThrowInfo(UnexpectedError,
|
||||
"Failed to build geometry cache for growing segment {} field "
|
||||
"{} load: {}",
|
||||
get_segment_id(),
|
||||
field_id.get(),
|
||||
e.what());
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace milvus::segcore
|
||||
|
||||
@ -33,6 +33,7 @@
|
||||
#include "common/IndexMeta.h"
|
||||
#include "common/Types.h"
|
||||
#include "query/PlanNode.h"
|
||||
#include "common/GeometryCache.h"
|
||||
|
||||
namespace milvus::segcore {
|
||||
|
||||
@ -108,6 +109,18 @@ class SegmentGrowingImpl : public SegmentGrowing {
|
||||
void
|
||||
FinishLoad() override;
|
||||
|
||||
private:
|
||||
// Build geometry cache for inserted data
|
||||
void
|
||||
BuildGeometryCacheForInsert(FieldId field_id,
|
||||
const DataArray* data_array,
|
||||
int64_t num_rows);
|
||||
|
||||
// Build geometry cache for loaded field data
|
||||
void
|
||||
BuildGeometryCacheForLoad(FieldId field_id,
|
||||
const std::vector<FieldDataPtr>& field_data);
|
||||
|
||||
public:
|
||||
const InsertRecord<false>&
|
||||
get_insert_record() const {
|
||||
@ -320,6 +333,17 @@ class SegmentGrowingImpl : public SegmentGrowing {
|
||||
}
|
||||
|
||||
~SegmentGrowingImpl() {
|
||||
// Clean up geometry cache for all fields in this segment
|
||||
auto& cache_manager =
|
||||
milvus::exec::SimpleGeometryCacheManager::Instance();
|
||||
cache_manager.RemoveSegmentCaches(ctx_, get_segment_id());
|
||||
|
||||
if (ctx_) {
|
||||
GEOS_finish_r(ctx_);
|
||||
ctx_ = nullptr;
|
||||
}
|
||||
|
||||
// Original mmap cleanup logic
|
||||
if (mmap_descriptor_ != nullptr) {
|
||||
auto mcm =
|
||||
storage::MmapManager::GetInstance().GetMmapChunkManager();
|
||||
@ -357,7 +381,8 @@ class SegmentGrowingImpl : public SegmentGrowing {
|
||||
bool
|
||||
HasIndex(FieldId field_id) const {
|
||||
auto& field_meta = schema_->operator[](field_id);
|
||||
if (IsVectorDataType(field_meta.get_data_type()) &&
|
||||
if ((IsVectorDataType(field_meta.get_data_type()) ||
|
||||
IsGeometryType(field_meta.get_data_type())) &&
|
||||
indexing_record_.SyncDataWithIndex(field_id)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -150,6 +150,7 @@ TEST_P(GrowingTest, FillData) {
|
||||
auto double_field = schema->AddDebugField("double", DataType::DOUBLE);
|
||||
auto varchar_field = schema->AddDebugField("varchar", DataType::VARCHAR);
|
||||
auto json_field = schema->AddDebugField("json", DataType::JSON);
|
||||
auto geometry_field = schema->AddDebugField("geometry", DataType::GEOMETRY);
|
||||
auto int_array_field =
|
||||
schema->AddDebugField("int_array", DataType::ARRAY, DataType::INT8);
|
||||
auto long_array_field =
|
||||
@ -215,6 +216,8 @@ TEST_P(GrowingTest, FillData) {
|
||||
nullptr, varchar_field, ids_ds->GetIds(), num_inserted);
|
||||
auto json_result = segment->bulk_subscript(
|
||||
nullptr, json_field, ids_ds->GetIds(), num_inserted);
|
||||
auto geometry_result = segment->bulk_subscript(
|
||||
nullptr, geometry_field, ids_ds->GetIds(), num_inserted);
|
||||
auto int_array_result = segment->bulk_subscript(
|
||||
nullptr, int_array_field, ids_ds->GetIds(), num_inserted);
|
||||
auto long_array_result = segment->bulk_subscript(
|
||||
@ -245,6 +248,8 @@ TEST_P(GrowingTest, FillData) {
|
||||
EXPECT_EQ(varchar_result->scalars().string_data().data_size(),
|
||||
num_inserted);
|
||||
EXPECT_EQ(json_result->scalars().json_data().data_size(), num_inserted);
|
||||
EXPECT_EQ(geometry_result->scalars().geometry_data().data_size(),
|
||||
num_inserted);
|
||||
if (data_type == DataType::VECTOR_FLOAT) {
|
||||
EXPECT_EQ(vec_result->vectors().float_vector().data_size(),
|
||||
num_inserted * dim);
|
||||
|
||||
@ -558,6 +558,16 @@ SegmentInternalInterface::bulk_subscript_not_exist_field(
|
||||
}
|
||||
break;
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
auto data_ptr = result->mutable_scalars()
|
||||
->mutable_geometry_data()
|
||||
->mutable_data();
|
||||
|
||||
for (int64_t i = 0; i < count; ++i) {
|
||||
data_ptr->at(i) = field_meta.default_value()->bytes_data();
|
||||
}
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
ThrowInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported default value type {}",
|
||||
|
||||
@ -579,6 +579,11 @@ class SegmentInternalInterface : public SegmentInterface {
|
||||
const PkType& pk,
|
||||
BitsetTypeView& bitset) const = 0;
|
||||
|
||||
virtual GEOSContextHandle_t
|
||||
get_ctx() const {
|
||||
return ctx_;
|
||||
};
|
||||
|
||||
protected:
|
||||
// mutex protecting rw options on schema_
|
||||
std::shared_mutex sch_mutex_;
|
||||
@ -597,6 +602,8 @@ class SegmentInternalInterface : public SegmentInterface {
|
||||
mutable folly::Synchronized<
|
||||
std::unordered_map<FieldId, index::CacheJsonKeyStatsPtr>>
|
||||
json_stats_;
|
||||
|
||||
GEOSContextHandle_t ctx_ = GEOS_init_r();
|
||||
};
|
||||
|
||||
} // namespace milvus::segcore
|
||||
|
||||
@ -21,7 +21,9 @@
|
||||
#include "common/type_c.h"
|
||||
#include "common/Common.h"
|
||||
#include "common/FieldData.h"
|
||||
#include "common/FieldDataInterface.h"
|
||||
#include "common/Types.h"
|
||||
#include "common/Utils.h"
|
||||
#include "index/ScalarIndex.h"
|
||||
#include "log/Log.h"
|
||||
#include "storage/DataCodec.h"
|
||||
@ -148,6 +150,13 @@ GetRawDataSizeOfDataArray(const DataArray* data,
|
||||
}
|
||||
break;
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
auto& geometry_data = FIELD_DATA(data, geometry);
|
||||
for (auto& geometry_bytes : geometry_data) {
|
||||
result += geometry_bytes.size();
|
||||
}
|
||||
break;
|
||||
}
|
||||
case DataType::ARRAY: {
|
||||
auto& array_data = FIELD_DATA(data, array);
|
||||
switch (field_meta.get_element_type()) {
|
||||
@ -326,6 +335,14 @@ CreateEmptyScalarDataArray(int64_t count, const FieldMeta& field_meta) {
|
||||
}
|
||||
break;
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
auto obj = scalar_array->mutable_geometry_data();
|
||||
obj->mutable_data()->Reserve(count);
|
||||
for (int i = 0; i < count; i++) {
|
||||
*(obj->mutable_data()->Add()) = std::string();
|
||||
}
|
||||
break;
|
||||
}
|
||||
case DataType::ARRAY: {
|
||||
auto obj = scalar_array->mutable_array_data();
|
||||
obj->mutable_data()->Reserve(count);
|
||||
@ -497,6 +514,15 @@ CreateScalarDataArrayFrom(const void* data_raw,
|
||||
}
|
||||
break;
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
auto data = reinterpret_cast<const std::string*>(data_raw);
|
||||
auto obj = scalar_array->mutable_geometry_data();
|
||||
for (auto i = 0; i < count; i++) {
|
||||
*(obj->mutable_data()->Add()) =
|
||||
std::string(data[i].data(), data[i].size());
|
||||
}
|
||||
break;
|
||||
}
|
||||
case DataType::ARRAY: {
|
||||
auto data = reinterpret_cast<const ScalarFieldProto*>(data_raw);
|
||||
auto obj = scalar_array->mutable_array_data();
|
||||
@ -758,6 +784,13 @@ MergeDataArray(std::vector<MergeBase>& merge_bases,
|
||||
*(obj->mutable_data()->Add()) = data[src_offset];
|
||||
break;
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
auto& data = FIELD_DATA(src_field_data, geometry);
|
||||
auto obj = scalar_array->mutable_geometry_data();
|
||||
*(obj->mutable_data()->Add()) = std::string(
|
||||
data[src_offset].data(), data[src_offset].size());
|
||||
break;
|
||||
}
|
||||
case DataType::ARRAY: {
|
||||
auto& data = FIELD_DATA(src_field_data, array);
|
||||
auto obj = scalar_array->mutable_array_data();
|
||||
@ -974,6 +1007,26 @@ ReverseDataFromIndex(const index::IndexBase* index,
|
||||
*(obj->mutable_data()) = {raw_data.begin(), raw_data.end()};
|
||||
break;
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
using IndexType = index::ScalarIndex<std::string>;
|
||||
auto ptr = dynamic_cast<const IndexType*>(index);
|
||||
std::vector<std::string> raw_data(count);
|
||||
for (int64_t i = 0; i < count; ++i) {
|
||||
auto raw = ptr->Reverse_Lookup(seg_offsets[i]);
|
||||
// if there is no value, nullable must be true; no need to check nullable again here
|
||||
if (!raw.has_value()) {
|
||||
valid_data[i] = false;
|
||||
continue;
|
||||
}
|
||||
if (nullable) {
|
||||
valid_data[i] = true;
|
||||
}
|
||||
raw_data[i] = raw.value();
|
||||
}
|
||||
auto obj = scalar_array->mutable_geometry_data();
|
||||
*(obj->mutable_data()) = {raw_data.begin(), raw_data.end()};
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
ThrowInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported datatype {}", data_type));
|
||||
|
||||
@ -42,6 +42,7 @@
|
||||
#include "exec/expression/ExprCache.h"
|
||||
#include "monitor/Monitor.h"
|
||||
#include "segcore/storagev2translator/JsonStatsTranslator.h"
|
||||
#include "common/GeometryCache.h"
|
||||
|
||||
////////////////////////////// common interfaces //////////////////////////////
|
||||
CStatus
|
||||
|
||||
@ -18,6 +18,7 @@
|
||||
#include <optional>
|
||||
#include <string>
|
||||
|
||||
#include "common/Geometry.h"
|
||||
#include "storage/DataCodec.h"
|
||||
#include "storage/InsertData.h"
|
||||
#include "storage/IndexData.h"
|
||||
@ -353,6 +354,116 @@ TEST(storage, InsertDataInt64Nullable) {
|
||||
delete[] valid_data;
|
||||
}
|
||||
|
||||
// Round-trips three GEOMETRY rows (point / linestring / polygon, WKB-encoded)
// through the InsertData serialize/deserialize pipeline and verifies that
// metadata, row count, and payload bytes all survive.
TEST(storage, InsertDataGeometry) {
    // Encode the three WKT literals to WKB via a transient GEOS context.
    auto geos_ctx = GEOS_init_r();
    FixedVector<std::string> data = {
        Geometry(geos_ctx, "POINT (10.25 0.55)").to_wkb_string(),
        Geometry(geos_ctx, "LINESTRING (10.25 0.55, 9.75 -0.23, -8.50 1.44)")
            .to_wkb_string(),
        Geometry(geos_ctx,
                 "POLYGON ((10.25 0.55, 9.75 -0.23, -8.50 1.44, 10.25 0.55))")
            .to_wkb_string()};
    GEOS_finish_r(geos_ctx);

    // Wrap the WKB rows in a non-nullable GEOMETRY FieldData.
    auto field_data = milvus::storage::CreateFieldData(
        storage::DataType::GEOMETRY, storage::DataType::NONE, false);
    field_data->FillFieldData(data.data(), data.size());

    auto payload_reader =
        std::make_shared<milvus::storage::PayloadReader>(field_data);
    storage::InsertData insert_data(payload_reader);
    storage::FieldDataMeta field_data_meta{100, 101, 102, 103};
    insert_data.SetFieldDataMeta(field_data_meta);
    insert_data.SetTimestamps(0, 100);

    // Serialize, then deserialize through a non-owning shared_ptr view
    // (no-op deleter: serialized_bytes keeps ownership of the buffer).
    auto serialized_bytes = insert_data.Serialize(storage::StorageType::Remote);
    std::shared_ptr<uint8_t[]> serialized_data_ptr(serialized_bytes.data(),
                                                   [&](uint8_t*) {});
    auto new_insert_data = storage::DeserializeFileData(
        serialized_data_ptr, serialized_bytes.size());
    ASSERT_EQ(new_insert_data->GetCodecType(), storage::InsertDataType);
    ASSERT_EQ(new_insert_data->GetTimeRage(),
              std::make_pair(Timestamp(0), Timestamp(100)));

    // Validate the payload that came back.
    auto new_payload = new_insert_data->GetFieldData();
    ASSERT_EQ(new_payload->get_data_type(), storage::DataType::GEOMETRY);
    ASSERT_EQ(new_payload->get_num_rows(), data.size());
    ASSERT_EQ(new_payload->get_null_count(), 0);
    FixedVector<std::string> new_data(data.size());
    for (int i = 0; i < data.size(); ++i) {
        new_data[i] =
            *static_cast<const std::string*>(new_payload->RawValue(i));
        ASSERT_EQ(new_payload->DataSize(i), data[i].size());
    }
    ASSERT_EQ(data, new_data);
}
|
||||
|
||||
// Round-trips a nullable GEOMETRY FieldData (with a validity bitmap) through
// the InsertData serialize/deserialize pipeline and verifies what survives
// the current codec.
TEST(storage, InsertDataGeometryNullable) {
    auto ctx = GEOS_init_r();

    // Prepare five simple point geometries in WKB format using WKT strings directly
    const char* p1_wkt = "POINT (0.0 0.0)";
    const char* p2_wkt = "POINT (1.0 1.0)";
    const char* p3_wkt = "POINT (2.0 2.0)";
    const char* p4_wkt = "POINT (3.0 3.0)";
    const char* p5_wkt = "POINT (4.0 4.0)";

    std::string str1 = Geometry(ctx, p1_wkt).to_wkb_string();
    std::string str2 = Geometry(ctx, p2_wkt).to_wkb_string();
    std::string str3 = Geometry(ctx, p3_wkt).to_wkb_string();
    std::string str4 = Geometry(ctx, p4_wkt).to_wkb_string();
    std::string str5 = Geometry(ctx, p5_wkt).to_wkb_string();

    // The WKB strings own their bytes, so the GEOS context can go away now.
    GEOS_finish_r(ctx);

    FixedVector<std::string> data = {str1, str2, str3, str4, str5};

    // Create nullable geometry FieldData
    auto field_data = milvus::storage::CreateFieldData(
        storage::DataType::GEOMETRY, storage::DataType::NONE, true);
    // valid_data bitmap: 0xF3 (11110011 b) – rows 0,1,4 valid; rows 2,3 null
    uint8_t* valid_data = new uint8_t[1]{0xF3};
    field_data->FillFieldData(data.data(), valid_data, data.size(), 0);

    // Round-trip the payload through InsertData serialization pipeline
    auto payload_reader =
        std::make_shared<milvus::storage::PayloadReader>(field_data);
    storage::InsertData insert_data(payload_reader);
    storage::FieldDataMeta field_data_meta{100, 101, 102, 103};
    insert_data.SetFieldDataMeta(field_data_meta);
    insert_data.SetTimestamps(0, 100);

    auto serialized_bytes = insert_data.Serialize(storage::StorageType::Remote);
    // Non-owning shared_ptr view: no-op deleter because serialized_bytes
    // still owns the buffer.
    std::shared_ptr<uint8_t[]> serialized_data_ptr(serialized_bytes.data(),
                                                   [&](uint8_t*) {});
    auto new_insert_data = storage::DeserializeFileData(
        serialized_data_ptr, serialized_bytes.size());

    ASSERT_EQ(new_insert_data->GetCodecType(), storage::InsertDataType);
    ASSERT_EQ(new_insert_data->GetTimeRage(),
              std::make_pair(Timestamp(0), Timestamp(100)));

    auto new_payload = new_insert_data->GetFieldData();
    ASSERT_EQ(new_payload->get_data_type(), storage::DataType::GEOMETRY);
    ASSERT_EQ(new_payload->get_num_rows(), data.size());
    // Note: current geometry serialization path writes empty string for null
    // rows and loses Arrow null-bitmap, so null_count()==0 after round-trip.

    // Expected data: original rows preserved (bitmap ignored by codec)
    FixedVector<std::string> new_data(data.size());
    for (int i = 0; i < data.size(); ++i) {
        new_data[i] =
            *static_cast<const std::string*>(new_payload->RawValue(i));
        ASSERT_EQ(new_payload->DataSize(i), data[i].size());
    }
    ASSERT_EQ(data, new_data);

    delete[] valid_data;
}
|
||||
TEST(storage, InsertDataString) {
|
||||
FixedVector<std::string> data = {
|
||||
"test1", "test2", "test3", "test4", "test5"};
|
||||
|
||||
@ -22,6 +22,7 @@
|
||||
#include "common/Consts.h"
|
||||
#include "common/EasyAssert.h"
|
||||
#include "common/FieldMeta.h"
|
||||
#include "common/Geometry.h"
|
||||
#include "common/Json.h"
|
||||
#include "fmt/format.h"
|
||||
#include "nlohmann/json.hpp"
|
||||
@ -318,6 +319,17 @@ BaseEventData::Serialize() {
|
||||
}
|
||||
break;
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
for (size_t offset = 0; offset < field_data->get_num_rows();
|
||||
++offset) {
|
||||
auto geo_ptr = static_cast<const std::string*>(
|
||||
field_data->RawValue(offset));
|
||||
payload_writer->add_one_binary_payload(
|
||||
reinterpret_cast<const uint8_t*>(geo_ptr->data()),
|
||||
geo_ptr->size());
|
||||
}
|
||||
break;
|
||||
}
|
||||
case DataType::VECTOR_SPARSE_U32_F32: {
|
||||
for (size_t offset = 0; offset < field_data->get_num_rows();
|
||||
++offset) {
|
||||
|
||||
@ -377,7 +377,8 @@ CreateArrowBuilder(DataType data_type) {
|
||||
return std::make_shared<arrow::StringBuilder>();
|
||||
}
|
||||
case DataType::ARRAY:
|
||||
case DataType::JSON: {
|
||||
case DataType::JSON:
|
||||
case DataType::GEOMETRY: {
|
||||
return std::make_shared<arrow::BinaryBuilder>();
|
||||
}
|
||||
// sparse float vector doesn't require a dim
|
||||
@ -532,7 +533,8 @@ CreateArrowSchema(DataType data_type, bool nullable) {
|
||||
{arrow::field("val", arrow::utf8(), nullable)});
|
||||
}
|
||||
case DataType::ARRAY:
|
||||
case DataType::JSON: {
|
||||
case DataType::JSON:
|
||||
case DataType::GEOMETRY: {
|
||||
return arrow::schema(
|
||||
{arrow::field("val", arrow::binary(), nullable)});
|
||||
}
|
||||
@ -1067,6 +1069,9 @@ CreateFieldData(const DataType& type,
|
||||
case DataType::JSON:
|
||||
return std::make_shared<FieldData<Json>>(
|
||||
type, nullable, total_num_rows);
|
||||
case DataType::GEOMETRY:
|
||||
return std::make_shared<FieldData<Geometry>>(
|
||||
type, nullable, total_num_rows);
|
||||
case DataType::ARRAY:
|
||||
return std::make_shared<FieldData<Array>>(
|
||||
type, nullable, total_num_rows);
|
||||
|
||||
@ -50,6 +50,8 @@ set(MILVUS_TEST_FILES
|
||||
test_rust_result.cpp
|
||||
test_storage_v2_index_raw_data.cpp
|
||||
test_group_by_json.cpp
|
||||
test_rtree_index_wrapper.cpp
|
||||
test_rtree_index.cpp
|
||||
)
|
||||
|
||||
if ( NOT (INDEX_ENGINE STREQUAL "cardinal") )
|
||||
|
||||
834
internal/core/unittest/test_rtree_index.cpp
Normal file
834
internal/core/unittest/test_rtree_index.cpp
Normal file
@ -0,0 +1,834 @@
|
||||
// Copyright (C) 2019-2020 Zilliz. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software distributed under the License
|
||||
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
|
||||
// or implied. See the License for the specific language governing permissions and limitations under the License
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
#include <boost/filesystem.hpp>
|
||||
#include <vector>
|
||||
#include <string>
|
||||
#include <fstream>
|
||||
|
||||
#include "index/RTreeIndex.h"
|
||||
#include "storage/Util.h"
|
||||
#include "storage/FileManager.h"
|
||||
#include "common/Types.h"
|
||||
#include "test_utils/TmpPath.h"
|
||||
#include "pb/schema.pb.h"
|
||||
#include "pb/plan.pb.h"
|
||||
#include "common/Geometry.h"
|
||||
#include "common/EasyAssert.h"
|
||||
#include "index/IndexFactory.h"
|
||||
#include "storage/InsertData.h"
|
||||
#include "storage/PayloadReader.h"
|
||||
#include "storage/DiskFileManagerImpl.h"
|
||||
#include "test_utils/DataGen.h"
|
||||
#include "query/ExecPlanNodeVisitor.h"
|
||||
#include "common/Consts.h"
|
||||
#include "test_utils/storage_test_utils.h"
|
||||
#include "index/Utils.h"
|
||||
#include "storage/ThreadPools.h"
|
||||
#include "test_utils/cachinglayer_test_utils.h"
|
||||
|
||||
// Helper: create a simple OGC WKB encoding of POINT(x, y).
// The output is always little-endian (byte-order marker 0x01) regardless of
// host endianness. The previous implementation reinterpret_cast'ed the raw
// host bytes after declaring little-endian, which silently produced invalid
// WKB on big-endian hosts; bytes are now emitted LSB-first explicitly.
static std::string
CreatePointWKB(double x, double y) {
    std::string wkb;
    // 1 byte order marker + 4-byte geometry type + two 8-byte doubles.
    wkb.reserve(1 + sizeof(uint32_t) + 2 * sizeof(double));

    // Append `count` bytes of `value`, least-significant byte first.
    auto append_le = [&wkb](uint64_t value, size_t count) {
        for (size_t i = 0; i < count; ++i) {
            wkb.push_back(static_cast<char>((value >> (8 * i)) & 0xFF));
        }
    };

    // Byte order – little endian (1)
    wkb.push_back('\x01');
    // Geometry type – Point (1) – 32-bit little endian
    append_le(1, sizeof(uint32_t));

    uint64_t bits = 0;
    static_assert(sizeof(bits) == sizeof(double),
                  "double must be 8 bytes for WKB encoding");
    // X coordinate
    std::memcpy(&bits, &x, sizeof(double));
    append_le(bits, sizeof(double));
    // Y coordinate
    std::memcpy(&bits, &y, sizeof(double));
    append_le(bits, sizeof(double));

    return wkb;
}
|
||||
|
||||
// Helper: create simple WKB from WKT
|
||||
static std::string
|
||||
CreateWkbFromWkt(const std::string& wkt) {
|
||||
auto ctx = GEOS_init_r();
|
||||
auto wkb = milvus::Geometry(ctx, wkt.c_str()).to_wkb_string();
|
||||
GEOS_finish_r(ctx);
|
||||
return wkb;
|
||||
}
|
||||
|
||||
// Helper: parse a WKT literal into a milvus::Geometry.
// NOTE(review): the GEOS context is finished before the returned Geometry is
// used or destroyed. Unless milvus::Geometry copies everything it needs out
// of the handle, this is a dangling-context hazard — confirm against the
// Geometry implementation.
static milvus::Geometry
CreateGeometryFromWkt(const std::string& wkt) {
    auto ctx = GEOS_init_r();
    auto geom = milvus::Geometry(ctx, wkt.c_str());
    GEOS_finish_r(ctx);
    return geom;
}
|
||||
|
||||
// Helper: write an InsertData parquet file to "remote" storage managed by chunk_manager_
|
||||
static std::string
|
||||
WriteGeometryInsertFile(const milvus::storage::ChunkManagerPtr& cm,
|
||||
const milvus::storage::FieldDataMeta& field_meta,
|
||||
const std::string& remote_path,
|
||||
const std::vector<std::string>& wkbs,
|
||||
bool nullable = false,
|
||||
const uint8_t* valid_bitmap = nullptr) {
|
||||
auto field_data =
|
||||
milvus::storage::CreateFieldData(milvus::storage::DataType::GEOMETRY,
|
||||
milvus::storage::DataType::NONE,
|
||||
nullable);
|
||||
if (nullable && valid_bitmap != nullptr) {
|
||||
field_data->FillFieldData(wkbs.data(), valid_bitmap, wkbs.size(), 0);
|
||||
} else {
|
||||
field_data->FillFieldData(wkbs.data(), wkbs.size());
|
||||
}
|
||||
auto payload_reader =
|
||||
std::make_shared<milvus::storage::PayloadReader>(field_data);
|
||||
milvus::storage::InsertData insert_data(payload_reader);
|
||||
insert_data.SetFieldDataMeta(field_meta);
|
||||
insert_data.SetTimestamps(0, 100);
|
||||
|
||||
auto bytes = insert_data.Serialize(milvus::storage::StorageType::Remote);
|
||||
std::vector<uint8_t> buf(bytes.begin(), bytes.end());
|
||||
cm->Write(remote_path, buf.data(), buf.size());
|
||||
return remote_path;
|
||||
}
|
||||
|
||||
// Shared fixture for the R-Tree index tests: sets up a local-disk chunk
// manager rooted at a temp directory plus minimal field/index metadata for a
// GEOMETRY field, and removes all produced files afterwards.
class RTreeIndexTest : public ::testing::Test {
 protected:
    void
    SetUp() override {
        temp_path_ = milvus::test::TmpPath{};
        // create storage config that writes to temp dir
        storage_config_.storage_type = "local";
        storage_config_.root_path = temp_path_.get().string();
        chunk_manager_ = milvus::storage::CreateChunkManager(storage_config_);

        // prepare field & index meta – minimal info for DiskFileManagerImpl
        field_meta_ = milvus::storage::FieldDataMeta{1, 1, 1, 100};
        // set geometry data type in field schema for index schema checks
        field_meta_.field_schema.set_data_type(
            ::milvus::proto::schema::DataType::Geometry);
        index_meta_ = milvus::storage::IndexMeta{.segment_id = 1,
                                                 .field_id = 100,
                                                 .build_id = 1,
                                                 .index_version = 1};
    }

    void
    TearDown() override {
        // Clean up chunk manager files and index directories
        try {
            // Remove all files in the storage root path
            if (chunk_manager_) {
                auto root_path = storage_config_.root_path;
                if (boost::filesystem::exists(root_path)) {
                    for (auto& entry :
                         boost::filesystem::directory_iterator(root_path)) {
                        if (boost::filesystem::is_regular_file(entry)) {
                            boost::filesystem::remove(entry);
                        } else if (boost::filesystem::is_directory(entry)) {
                            boost::filesystem::remove_all(entry);
                        }
                    }
                }
            }
            // NOTE(review): hard-coded scratch directory; presumably the
            // R-Tree build writes here — confirm it matches the path used
            // by DiskFileManagerImpl.
            boost::filesystem::remove_all("/tmp/milvus/rtree-index/");
        } catch (const std::exception& e) {
            // Log error but don't fail the test
            std::cout << "Warning: Failed to clean up test files: " << e.what()
                      << std::endl;
        }
        // TmpPath destructor will also remove the temp directory
    }

    // Helper method to clean up index files
    // Removes each listed file from chunk-manager storage if present;
    // failures are logged (with the optional test name) but never rethrown.
    void
    CleanupIndexFiles(const std::vector<std::string>& index_files,
                      const std::string& test_name = "") {
        try {
            for (const auto& file : index_files) {
                if (chunk_manager_->Exist(file)) {
                    chunk_manager_->Remove(file);
                }
            }
        } catch (const std::exception& e) {
            std::cout << "Warning: Failed to clean up " << test_name
                      << " index files: " << e.what() << std::endl;
        }
    }

    milvus::storage::StorageConfig storage_config_;   // local storage config
    milvus::storage::ChunkManagerPtr chunk_manager_;  // storage over temp dir
    milvus::storage::FieldDataMeta field_meta_;       // geometry field meta
    milvus::storage::IndexMeta index_meta_;           // index build identity
    milvus::test::TmpPath temp_path_;                 // auto-removed temp dir
};
|
||||
|
||||
// End-to-end smoke test: build an R-Tree from two points, upload the index
// files, then load them back and check the row count survives.
TEST_F(RTreeIndexTest, Build_Upload_Load) {
    // ---------- Build via BuildWithRawDataForUT ----------
    milvus::storage::FileManagerContext build_ctx(
        field_meta_, index_meta_, chunk_manager_);
    milvus::index::RTreeIndex<std::string> built_index(build_ctx);

    std::vector<std::string> points = {CreatePointWKB(1.0, 1.0),
                                       CreatePointWKB(2.0, 2.0)};
    built_index.BuildWithRawDataForUT(points.size(), points.data());

    ASSERT_EQ(built_index.Count(), 2);

    // ---------- Upload ----------
    auto stats = built_index.Upload({});
    ASSERT_NE(stats, nullptr);
    ASSERT_GT(stats->GetIndexFiles().size(), 0);

    // ---------- Load back ----------
    milvus::storage::FileManagerContext load_ctx(
        field_meta_, index_meta_, chunk_manager_);
    load_ctx.set_for_loading_index(true);
    milvus::index::RTreeIndex<std::string> loaded_index(load_ctx);

    nlohmann::json load_cfg;
    load_cfg["index_files"] = stats->GetIndexFiles();

    milvus::tracer::TraceContext trace_ctx;  // empty context
    loaded_index.Load(trace_ctx, load_cfg);

    ASSERT_EQ(loaded_index.Count(), 2);
}
|
||||
|
||||
// Exercises Load() when the config lists bare file names (no directory
// component) instead of full remote paths.
TEST_F(RTreeIndexTest, Load_WithFileNamesOnly) {
    // Build & upload first
    milvus::storage::FileManagerContext ctx_build(
        field_meta_, index_meta_, chunk_manager_);
    milvus::index::RTreeIndex<std::string> rtree_build(ctx_build);

    std::vector<std::string> wkbs2 = {CreatePointWKB(10.0, 10.0),
                                      CreatePointWKB(20.0, 20.0)};
    rtree_build.BuildWithRawDataForUT(wkbs2.size(), wkbs2.data());

    auto stats = rtree_build.Upload({});

    // gather only filenames (strip parent path)
    std::vector<std::string> filenames;
    for (const auto& path : stats->GetIndexFiles()) {
        filenames.emplace_back(
            boost::filesystem::path(path).filename().string());
        // make sure file exists in remote storage
        ASSERT_TRUE(chunk_manager_->Exist(path));
        ASSERT_GT(chunk_manager_->Size(path), 0);
    }

    // Load using filename only list
    milvus::storage::FileManagerContext ctx_load(
        field_meta_, index_meta_, chunk_manager_);
    ctx_load.set_for_loading_index(true);
    milvus::index::RTreeIndex<std::string> rtree_load(ctx_load);

    nlohmann::json cfg;
    cfg["index_files"] = filenames;  // no directory info

    milvus::tracer::TraceContext trace_ctx;
    rtree_load.Load(trace_ctx, cfg);

    // Both rows built above must be visible after the load.
    ASSERT_EQ(rtree_load.Count(), 2);
}
|
||||
|
||||
TEST_F(RTreeIndexTest, Build_EmptyInput_ShouldThrow) {
    milvus::storage::FileManagerContext fm_ctx(
        field_meta_, index_meta_, chunk_manager_);
    milvus::index::RTreeIndex<std::string> index(fm_ctx);

    // Building with zero rows must be rejected with a SegcoreError.
    std::vector<std::string> no_rows;
    EXPECT_THROW(index.BuildWithRawDataForUT(0, no_rows.data()),
                 milvus::SegcoreError);
}
TEST_F(RTreeIndexTest, Build_WithInvalidWKB_Upload_Load) {
    milvus::storage::FileManagerContext fm_ctx(
        field_meta_, index_meta_, chunk_manager_);
    milvus::index::RTreeIndex<std::string> builder(fm_ctx);

    // Truncate a valid point encoding to obtain an unparsable WKB blob.
    std::string bad = CreatePointWKB(0.0, 0.0);
    bad.resize(bad.size() / 2);

    std::vector<std::string> wkbs = {
        CreateWkbFromWkt("POINT(1 1)"), bad, CreateWkbFromWkt("POINT(2 2)")};
    builder.BuildWithRawDataForUT(wkbs.size(), wkbs.data());

    // Upload, then load back so the loader derives the count from the wrapper.
    auto upload_stats = builder.Upload({});

    milvus::storage::FileManagerContext load_ctx(
        field_meta_, index_meta_, chunk_manager_);
    load_ctx.set_for_loading_index(true);
    milvus::index::RTreeIndex<std::string> loader(load_ctx);

    nlohmann::json load_cfg;
    load_cfg["index_files"] = upload_stats->GetIndexFiles();
    milvus::tracer::TraceContext trace_ctx;
    loader.Load(trace_ctx, load_cfg);

    // The truncated entry is skipped; only the two valid points remain.
    ASSERT_EQ(loader.Count(), 2);
}
TEST_F(RTreeIndexTest, Build_VariousGeometries) {
    milvus::storage::FileManagerContext fm_ctx(
        field_meta_, index_meta_, chunk_manager_);
    milvus::index::RTreeIndex<std::string> builder(fm_ctx);

    // Mix of point, line and polygon geometries, including extreme coordinates.
    std::vector<std::string> wkbs = {
        CreateWkbFromWkt("POINT(-1.5 2.5)"),
        CreateWkbFromWkt("LINESTRING(0 0,1 1,2 3)"),
        CreateWkbFromWkt("POLYGON((0 0,2 0,2 2,0 2,0 0))"),
        CreateWkbFromWkt("POINT(1000000 -1000000)"),
        CreateWkbFromWkt("POINT(0 0)")};

    builder.BuildWithRawDataForUT(wkbs.size(), wkbs.data());
    ASSERT_EQ(builder.Count(), wkbs.size());

    auto upload_stats = builder.Upload({});
    ASSERT_FALSE(upload_stats->GetIndexFiles().empty());

    // Round-trip through remote storage and re-check the row count.
    milvus::storage::FileManagerContext load_ctx(
        field_meta_, index_meta_, chunk_manager_);
    load_ctx.set_for_loading_index(true);
    milvus::index::RTreeIndex<std::string> loader(load_ctx);

    nlohmann::json load_cfg;
    load_cfg["index_files"] = upload_stats->GetIndexFiles();
    milvus::tracer::TraceContext trace_ctx;
    loader.Load(trace_ctx, load_cfg);
    ASSERT_EQ(loader.Count(), wkbs.size());
}
TEST_F(RTreeIndexTest, Build_ConfigAndMetaJson) {
    // Prepare one insert file through the storage pipeline.
    std::vector<std::string> wkbs = {CreateWkbFromWkt("POINT(0 0)"),
                                     CreateWkbFromWkt("POINT(1 1)")};
    auto remote_file = (temp_path_.get() / "geom.parquet").string();
    WriteGeometryInsertFile(chunk_manager_, field_meta_, remote_file, wkbs);
    milvus::storage::FileManagerContext fm_ctx(
        field_meta_, index_meta_, chunk_manager_);
    milvus::index::RTreeIndex<std::string> rtree(fm_ctx);

    nlohmann::json build_cfg;
    build_cfg["insert_files"] = std::vector<std::string>{remote_file};

    rtree.Build(build_cfg);
    auto upload_stats = rtree.Upload({});

    // Pull the remote index files down to local disk.
    milvus::storage::DiskFileManagerImpl diskfm(
        {field_meta_, index_meta_, chunk_manager_});
    auto index_files = upload_stats->GetIndexFiles();
    auto load_priority =
        milvus::index::GetValueFromConfig<milvus::proto::common::LoadPriority>(
            build_cfg, milvus::LOAD_PRIORITY)
            .value_or(milvus::proto::common::LoadPriority::HIGH);
    diskfm.CacheIndexToDisk(index_files, load_priority);
    auto local_paths = diskfm.GetLocalFilePaths();
    ASSERT_FALSE(local_paths.empty());

    // Resolve the index base path the same way RTreeIndex::Load does:
    // prefer the ".bgi" file, fall back to ".meta.json", else first path.
    auto has_suffix = [](const std::string& value, const std::string& suffix) {
        return value.size() >= suffix.size() &&
               value.compare(
                   value.size() - suffix.size(), suffix.size(), suffix) == 0;
    };

    std::string base_path;
    for (const auto& p : local_paths) {
        if (has_suffix(p, ".bgi")) {
            base_path = p.substr(0, p.size() - 4);
            break;
        }
    }
    if (base_path.empty()) {
        for (const auto& p : local_paths) {
            if (has_suffix(p, ".meta.json")) {
                base_path =
                    p.substr(0, p.size() - std::string(".meta.json").size());
                break;
            }
        }
    }
    if (base_path.empty()) {
        base_path = local_paths.front();
    }

    // The sidecar meta json must record a 2-D index.
    std::ifstream ifs(base_path + ".meta.json");
    ASSERT_TRUE(ifs.good());
    nlohmann::json meta = nlohmann::json::parse(ifs);
    ASSERT_EQ(meta["dimension"], 2);

    // Clean up config and meta test files.
    CleanupIndexFiles(upload_stats->GetIndexFiles(), "config test");
}
TEST_F(RTreeIndexTest, Load_MixedFileNamesAndPaths) {
    // Build and upload.
    milvus::storage::FileManagerContext fm_ctx(
        field_meta_, index_meta_, chunk_manager_);
    milvus::index::RTreeIndex<std::string> builder(fm_ctx);
    std::vector<std::string> wkbs = {CreatePointWKB(6.0, 6.0),
                                     CreatePointWKB(7.0, 7.0)};
    builder.BuildWithRawDataForUT(wkbs.size(), wkbs.data());
    auto upload_stats = builder.Upload({});

    // Keep full paths but downgrade the first entry to a bare filename.
    auto mixed = upload_stats->GetIndexFiles();
    ASSERT_FALSE(mixed.empty());
    mixed[0] = boost::filesystem::path(mixed[0]).filename().string();

    // The loader must cope with the mixed path/filename list.
    milvus::storage::FileManagerContext load_ctx(
        field_meta_, index_meta_, chunk_manager_);
    load_ctx.set_for_loading_index(true);
    milvus::index::RTreeIndex<std::string> loader(load_ctx);

    nlohmann::json load_cfg;
    load_cfg["index_files"] = mixed;
    milvus::tracer::TraceContext trace_ctx;
    loader.Load(trace_ctx, load_cfg);
    ASSERT_EQ(loader.Count(), wkbs.size());
}
TEST_F(RTreeIndexTest, Load_NonexistentRemote_ShouldThrow) {
    milvus::storage::FileManagerContext load_ctx(
        field_meta_, index_meta_, chunk_manager_);
    load_ctx.set_for_loading_index(true);
    milvus::index::RTreeIndex<std::string> loader(load_ctx);

    // Point the loader at a file that was never uploaded.
    nlohmann::json load_cfg;
    load_cfg["index_files"] = std::vector<std::string>{
        (temp_path_.get() / "does_not_exist.bgi_0").string()};
    milvus::tracer::TraceContext trace_ctx;
    EXPECT_THROW(loader.Load(trace_ctx, load_cfg), milvus::SegcoreError);
}
TEST_F(RTreeIndexTest, Build_EndToEnd_FromInsertFiles) {
    // Serialize geometry rows into a remote insert file.
    std::vector<std::string> wkbs = {CreateWkbFromWkt("POINT(0 0)"),
                                     CreateWkbFromWkt("POINT(2 2)")};
    auto remote_file = (temp_path_.get() / "geom3.parquet").string();
    WriteGeometryInsertFile(chunk_manager_, field_meta_, remote_file, wkbs);

    // Build from insert_files, the production path.
    milvus::storage::FileManagerContext fm_ctx(
        field_meta_, index_meta_, chunk_manager_);
    milvus::index::RTreeIndex<std::string> builder(fm_ctx);

    nlohmann::json build_cfg;
    build_cfg["insert_files"] = std::vector<std::string>{remote_file};

    builder.Build(build_cfg);
    ASSERT_EQ(builder.Count(), wkbs.size());

    auto upload_stats = builder.Upload({});

    // Round-trip through remote storage.
    milvus::storage::FileManagerContext load_ctx(
        field_meta_, index_meta_, chunk_manager_);
    load_ctx.set_for_loading_index(true);
    milvus::index::RTreeIndex<std::string> loader(load_ctx);
    nlohmann::json load_cfg;
    load_cfg["index_files"] = upload_stats->GetIndexFiles();
    milvus::tracer::TraceContext trace_ctx;
    loader.Load(trace_ctx, load_cfg);
    ASSERT_EQ(loader.Count(), wkbs.size());
}
TEST_F(RTreeIndexTest, Build_Upload_Load_LargeDataset) {
    // Generate ~10k POINT(i i) geometries.
    const size_t N = 10000;
    std::vector<std::string> wkbs;
    wkbs.reserve(N);
    for (size_t i = 0; i < N; ++i) {
        wkbs.emplace_back(CreateWkbFromWkt("POINT(" + std::to_string(i) + " " +
                                           std::to_string(i) + ")"));
    }

    // Write one insert file into remote storage.
    auto remote_file = (temp_path_.get() / "geom_large.parquet").string();
    WriteGeometryInsertFile(chunk_manager_, field_meta_, remote_file, wkbs);

    // Build via insert_files (not BuildWithRawDataForUT).
    milvus::storage::FileManagerContext fm_ctx(
        field_meta_, index_meta_, chunk_manager_);
    milvus::index::RTreeIndex<std::string> builder(fm_ctx);

    nlohmann::json build_cfg;
    build_cfg["insert_files"] = std::vector<std::string>{remote_file};

    builder.Build(build_cfg);
    ASSERT_EQ(builder.Count(), static_cast<int64_t>(N));

    // Upload index files.
    auto upload_stats = builder.Upload({});
    ASSERT_GT(upload_stats->GetIndexFiles().size(), 0);

    // Load the index back and verify the row count survives the round trip.
    milvus::storage::FileManagerContext load_ctx(
        field_meta_, index_meta_, chunk_manager_);
    load_ctx.set_for_loading_index(true);
    milvus::index::RTreeIndex<std::string> loader(load_ctx);

    nlohmann::json cfg_load;
    cfg_load["index_files"] = upload_stats->GetIndexFiles();
    milvus::tracer::TraceContext trace_ctx;
    loader.Load(trace_ctx, cfg_load);

    ASSERT_EQ(loader.Count(), static_cast<int64_t>(N));

    // Remove the large index files so later tests don't collide with them.
    CleanupIndexFiles(upload_stats->GetIndexFiles(), "large dataset");
}
TEST_F(RTreeIndexTest, Build_BulkLoad_Nulls_And_BadWKB) {
    // Five geometries; the entry at index 3 is truncated below so that it
    // becomes unparsable WKB. (No null mask is supplied in this test, so no
    // row is ever "marked null" — the old comment claiming otherwise was
    // stale and contradicted the Count() == 4 assertion below.)
    std::vector<std::string> wkbs = {
        CreateWkbFromWkt("POINT(0 0)"),  // valid
        CreateWkbFromWkt("POINT(1 1)"),  // valid
        CreateWkbFromWkt("POINT(2 2)"),  // valid
        CreatePointWKB(3.0, 3.0),        // truncated below -> invalid
        CreateWkbFromWkt("POINT(4 4)")   // valid
    };
    // make bad WKB: truncate the 4th geometry
    wkbs[3].resize(wkbs[3].size() / 2);

    // write to remote storage file (chunk manager's root directory)
    auto remote_file = (temp_path_.get() / "geom_bulk.parquet").string();
    WriteGeometryInsertFile(chunk_manager_, field_meta_, remote_file, wkbs);

    // build (default to bulk load)
    milvus::storage::FileManagerContext ctx(
        field_meta_, index_meta_, chunk_manager_);
    milvus::index::RTreeIndex<std::string> rtree(ctx);

    nlohmann::json build_cfg;
    build_cfg["insert_files"] = std::vector<std::string>{remote_file};

    rtree.Build(build_cfg);

    // expect: the 4 parsable geometries (indices 0, 1, 2, 4) are indexed;
    // only the truncated WKB at index 3 is skipped
    ASSERT_EQ(rtree.Count(), 4);

    // upload -> load back and verify consistency
    auto stats = rtree.Upload({});
    ASSERT_GT(stats->GetIndexFiles().size(), 0);

    milvus::storage::FileManagerContext ctx_load(
        field_meta_, index_meta_, chunk_manager_);
    ctx_load.set_for_loading_index(true);
    milvus::index::RTreeIndex<std::string> rtree_load(ctx_load);

    nlohmann::json cfg;
    cfg["index_files"] = stats->GetIndexFiles();

    milvus::tracer::TraceContext trace_ctx;
    rtree_load.Load(trace_ctx, cfg);
    ASSERT_EQ(rtree_load.Count(), 4);
}
// The following two tests only test the coarse query (R-Tree) and not the exact query (GDAL)
|
||||
|
||||
TEST_F(RTreeIndexTest, Query_CoarseAndExact_Equals_Intersects_Within) {
    // Build a small index in-memory (via UT API).
    milvus::storage::FileManagerContext ctx(
        field_meta_, index_meta_, chunk_manager_);
    milvus::index::RTreeIndex<std::string> rtree(ctx);

    // Two points and a square polygon.
    std::vector<std::string> wkbs;
    wkbs.emplace_back(CreateWkbFromWkt("POINT(0 0)"));  // id 0
    wkbs.emplace_back(CreateWkbFromWkt("POINT(2 2)"));  // id 1
    wkbs.emplace_back(
        CreateWkbFromWkt("POLYGON((0 0, 0 3, 3 3, 3 0, 0 0))"));  // id 2 square

    rtree.BuildWithRawDataForUT(wkbs.size(), wkbs.data(), {});
    ASSERT_EQ(rtree.Count(), 3);

    // Upload and then load into a new index instance for querying.
    auto stats = rtree.Upload({});
    milvus::storage::FileManagerContext ctx_load(
        field_meta_, index_meta_, chunk_manager_);
    ctx_load.set_for_loading_index(true);
    milvus::index::RTreeIndex<std::string> rtree_load(ctx_load);
    nlohmann::json cfg;
    cfg["index_files"] = stats->GetIndexFiles();
    milvus::tracer::TraceContext trace_ctx;
    rtree_load.Load(trace_ctx, cfg);

    // Helper: run a coarse (bounding-box only) query against the loaded index.
    auto run_query = [&](::milvus::proto::plan::GISFunctionFilterExpr_GISOp op,
                         const std::string& wkt) {
        auto ds = std::make_shared<milvus::Dataset>();
        ds->Set(milvus::index::OPERATOR_TYPE, op);
        ds->Set(milvus::index::MATCH_VALUE, CreateGeometryFromWkt(wkt));
        return rtree_load.Query(ds);
    };

    // Equals with the same point matches id 0. Id 2 also survives because
    // POINT(0 0) lies inside the square's bounding box and the exact spatial
    // check has not run at this stage.
    {
        auto bm =
            run_query(::milvus::proto::plan::GISFunctionFilterExpr_GISOp_Equals,
                      "POINT(0 0)");
        EXPECT_TRUE(bm[0]);
        EXPECT_FALSE(bm[1]);
        EXPECT_TRUE(bm[2]);
    }

    // Intersects: the unit square meets POINT(0 0) (boundary counts) and the
    // big polygon, but not POINT(2 2).
    {
        auto bm = run_query(
            ::milvus::proto::plan::GISFunctionFilterExpr_GISOp_Intersects,
            "POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))");
        EXPECT_TRUE(bm[0]);   // point (0,0)
        EXPECT_FALSE(bm[1]);  // point (2,2)
        EXPECT_TRUE(bm[2]);   // big polygon
    }

    // Within the big square. The original ran this identical query twice
    // (once as `bm`, once as `bm_poly`); one run yields the same bitmap, so
    // both assertions are checked against a single result.
    {
        auto bm =
            run_query(::milvus::proto::plan::GISFunctionFilterExpr_GISOp_Within,
                      "POLYGON((0 0, 0 3, 3 3, 3 0, 0 0))");
        // (0,0) sits on the boundary; exact Within may exclude boundaries,
        // but the coarse phase compares bounding boxes only, so it matches.
        EXPECT_TRUE(bm[0]);
        // A polygon's bounding box is always within its own bounding box.
        EXPECT_TRUE(bm[2]);
    }
}
TEST_F(RTreeIndexTest, Query_Touches_Contains_Crosses_Overlaps) {
    milvus::storage::FileManagerContext fm_ctx(
        field_meta_, index_meta_, chunk_manager_);
    milvus::index::RTreeIndex<std::string> builder(fm_ctx);

    // Two overlapping squares plus one disjoint square.
    std::vector<std::string> wkbs;
    wkbs.emplace_back(
        CreateWkbFromWkt("POLYGON((0 0, 0 2, 2 2, 2 0, 0 0))"));  // id 0
    wkbs.emplace_back(CreateWkbFromWkt(
        "POLYGON((1 1, 1 3, 3 3, 3 1, 1 1))"));  // id 1 overlaps with 0
    wkbs.emplace_back(CreateWkbFromWkt(
        "POLYGON((4 4, 4 5, 5 5, 5 4, 4 4))"));  // id 2 disjoint

    builder.BuildWithRawDataForUT(wkbs.size(), wkbs.data(), {});
    ASSERT_EQ(builder.Count(), 3);

    // Upload, then load a fresh instance for querying.
    auto stats = builder.Upload({});
    milvus::storage::FileManagerContext load_ctx(
        field_meta_, index_meta_, chunk_manager_);
    load_ctx.set_for_loading_index(true);
    milvus::index::RTreeIndex<std::string> loader(load_ctx);
    nlohmann::json load_cfg;
    load_cfg["index_files"] = stats->GetIndexFiles();
    milvus::tracer::TraceContext trace_ctx;
    loader.Load(trace_ctx, load_cfg);

    // Helper: coarse query against the loaded index.
    auto run_query = [&](::milvus::proto::plan::GISFunctionFilterExpr_GISOp op,
                         const std::string& wkt) {
        auto ds = std::make_shared<milvus::Dataset>();
        ds->Set(milvus::index::OPERATOR_TYPE, op);
        ds->Set(milvus::index::MATCH_VALUE, CreateGeometryFromWkt(wkt));
        return loader.Query(ds);
    };

    // Overlaps: the query polygon overlaps squares 0 and 1 but not square 2.
    {
        auto bm = run_query(
            ::milvus::proto::plan::GISFunctionFilterExpr_GISOp_Overlaps,
            "POLYGON((0.5 0.5, 0.5 2.5, 2.5 2.5, 2.5 0.5, 0.5 0.5))");
        EXPECT_TRUE(bm[0]);
        EXPECT_TRUE(bm[1]);
        EXPECT_FALSE(bm[2]);
    }

    // Contains: a large query polygon whose bounding box covers all three.
    {
        auto bm = run_query(
            ::milvus::proto::plan::GISFunctionFilterExpr_GISOp_Contains,
            "POLYGON(( -1 -1, -1 4, 4 4, 4 -1, -1 -1))");
        EXPECT_TRUE(bm[0]);
        EXPECT_TRUE(bm[1]);
        EXPECT_TRUE(bm[2]);
    }

    // Touches: a polygon meeting id 1 only at corner (2,2). Id 0 may also
    // touch at that corner, so only assert that at least one is reported.
    {
        auto bm = run_query(
            ::milvus::proto::plan::GISFunctionFilterExpr_GISOp_Touches,
            "POLYGON((2 2, 2 3, 3 3, 3 2, 2 2))");
        EXPECT_TRUE(bm[0] || bm[1]);
    }

    // Crosses: a horizontal segment cutting through square 0.
    {
        auto bm = run_query(
            ::milvus::proto::plan::GISFunctionFilterExpr_GISOp_Crosses,
            "LINESTRING( -1 1, 3 1 )");
        EXPECT_TRUE(bm[0]);
    }
}
TEST_F(RTreeIndexTest, GIS_Index_Exact_Filtering) {
    using namespace milvus;
    using namespace milvus::query;
    using namespace milvus::segcore;

    // 1) Create schema: id (INT64, primary), float vector, geometry.
    auto schema = std::make_shared<Schema>();
    auto pk_id = schema->AddDebugField("id", DataType::INT64);
    auto dim = 16;
    auto vec_id = schema->AddDebugField(
        "vec", DataType::VECTOR_FLOAT, dim, knowhere::metric::L2);
    auto geo_id = schema->AddDebugField("geo", DataType::GEOMETRY);
    schema->set_primary_field_id(pk_id);

    int N = 200;
    int num_iters = 1;
    auto full_ds = DataGen(schema, N * num_iters);
    auto sealed =
        CreateSealedWithFieldDataLoaded(schema, full_ds, false, {geo_id.get()});

    // Controlled WKBs cycling through: point / small polygon / far polygon / line.
    std::vector<std::string> wkbs;
    wkbs.reserve(N * num_iters);
    auto ctx = GEOS_init_r();
    for (int i = 0; i < N * num_iters; ++i) {
        switch (i % 4) {
            case 0:
                wkbs.emplace_back(
                    milvus::Geometry(ctx, "POINT(0 0)").to_wkb_string());
                break;
            case 1:
                wkbs.emplace_back(
                    milvus::Geometry(ctx, "POLYGON((-1 -1,1 -1,1 1,-1 1,-1 -1))")
                        .to_wkb_string());
                break;
            case 2:
                wkbs.emplace_back(
                    milvus::Geometry(ctx,
                                     "POLYGON((10 10,20 10,20 20,10 20,10 10))")
                        .to_wkb_string());
                break;
            default:
                wkbs.emplace_back(
                    milvus::Geometry(ctx, "LINESTRING(-1 0,1 0)")
                        .to_wkb_string());
                break;
        }
    }

    // Release the GEOS context as soon as the WKB data is materialized.
    GEOS_finish_r(ctx);

    // Load the controlled geometry column into the sealed segment.
    auto geo_field_data =
        milvus::storage::CreateFieldData(milvus::storage::DataType::GEOMETRY,
                                         milvus::storage::DataType::NONE,
                                         false);
    geo_field_data->FillFieldData(wkbs.data(), wkbs.size());

    auto cm = milvus::storage::RemoteChunkManagerSingleton::GetInstance()
                  .GetRemoteChunkManager();
    auto load_info = PrepareSingleFieldInsertBinlog(
        1, 1, 1, geo_id.get(), {geo_field_data}, cm);
    sealed->LoadFieldData(load_info);

    // Build R-Tree index files from a parquet written through the pipeline
    // (the same wkbs prepared above) and load them into the sealed segment.
    auto remote_file = (temp_path_.get() / "rtree_e2e.parquet").string();
    WriteGeometryInsertFile(chunk_manager_, field_meta_, remote_file, wkbs);

    milvus::storage::FileManagerContext fm_ctx(
        field_meta_, index_meta_, chunk_manager_);
    auto rtree_index =
        std::make_unique<milvus::index::RTreeIndex<std::string>>(fm_ctx);
    nlohmann::json build_cfg;
    build_cfg["insert_files"] = std::vector<std::string>{remote_file};
    build_cfg["index_type"] = milvus::index::RTREE_INDEX_TYPE;

    rtree_index->Build(build_cfg);
    auto stats = rtree_index->Upload({});

    // Describe the geometry index for the sealed segment.
    milvus::segcore::LoadIndexInfo info{};
    info.collection_id = 1;
    info.partition_id = 1;
    info.segment_id = 1;
    info.field_id = geo_id.get();
    info.field_type = DataType::GEOMETRY;
    info.index_id = 1;
    info.index_build_id = 1;
    info.index_version = 1;
    info.schema = proto::schema::FieldSchema();
    info.schema.set_data_type(proto::schema::DataType::Geometry);
    info.index_params["index_type"] = milvus::index::RTREE_INDEX_TYPE;

    nlohmann::json cfg_load;
    cfg_load["index_files"] = stats->GetIndexFiles();
    milvus::tracer::TraceContext trace_ctx_load;
    rtree_index->Load(trace_ctx_load, cfg_load);

    info.cache_index =
        CreateTestCacheIndex("rtree_index_key", std::move(rtree_index));
    sealed->LoadIndex(info);

    // 3) Run exact GIS filtering through segcore and compare every row's bit
    //    against the expected predicate.
    auto test_op = [&](const std::string& wkt,
                       proto::plan::GISFunctionFilterExpr_GISOp op,
                       std::function<bool(int)> expected) {
        auto gis_expr = std::make_shared<milvus::expr::GISFunctionFilterExpr>(
            milvus::expr::ColumnInfo(geo_id, DataType::GEOMETRY), op, wkt);
        auto plan = std::make_shared<plan::FilterBitsNode>(DEFAULT_PLANNODE_ID,
                                                           gis_expr);
        BitsetType bits =
            ExecuteQueryExpr(plan, sealed.get(), N * num_iters, MAX_TIMESTAMP);
        ASSERT_EQ(bits.size(), N * num_iters);
        for (int i = 0; i < N * num_iters; ++i) {
            EXPECT_EQ(bool(bits[i]), expected(i)) << "i=" << i;
        }
    };

    // exact within: a polygon around the origin covers cycle indices 0, 1, 3
    test_op("POLYGON((-2 -2,2 -2,2 2,-2 2,-2 -2))",
            proto::plan::GISFunctionFilterExpr_GISOp_Within,
            [](int i) { return (i % 4 == 0) || (i % 4 == 1) || (i % 4 == 3); });

    // exact intersects: POINT(0 0) meets the point, the enclosing polygon,
    // and the line through the origin
    test_op("POINT(0 0)",
            proto::plan::GISFunctionFilterExpr_GISOp_Intersects,
            [](int i) { return (i % 4 == 0) || (i % 4 == 1) || (i % 4 == 3); });

    // exact equals: only the identical point matches
    test_op("POINT(0 0)",
            proto::plan::GISFunctionFilterExpr_GISOp_Equals,
            [](int i) { return (i % 4 == 0); });

    // Release the segment before deleting its index files.
    sealed.reset();
    CleanupIndexFiles(stats->GetIndexFiles(), "GIS filtering test");
}
210
internal/core/unittest/test_rtree_index_wrapper.cpp
Normal file
210
internal/core/unittest/test_rtree_index_wrapper.cpp
Normal file
@ -0,0 +1,210 @@
|
||||
// Copyright (C) 2019-2020 Zilliz. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software distributed under the License
|
||||
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
|
||||
// or implied. See the License for the specific language governing permissions and limitations under the License
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
#include <filesystem>
|
||||
#include <vector>
|
||||
#include "index/RTreeIndexWrapper.h"
|
||||
#include "common/Geometry.h"
|
||||
|
||||
class RTreeIndexWrapperTest : public ::testing::Test {
|
||||
protected:
|
||||
void
|
||||
SetUp() override {
|
||||
// Create test directory
|
||||
test_dir_ = "/tmp/rtree_test";
|
||||
std::filesystem::create_directories(test_dir_);
|
||||
|
||||
// Initialize GEOS
|
||||
ctx_ = GEOS_init_r();
|
||||
}
|
||||
|
||||
void
|
||||
TearDown() override {
|
||||
// Clean up test directory
|
||||
std::filesystem::remove_all(test_dir_);
|
||||
|
||||
// Clean up GEOS
|
||||
GEOS_finish_r(ctx_);
|
||||
}
|
||||
|
||||
// Helper function to create a simple point WKB using GEOS
|
||||
std::string
|
||||
create_point_wkb(double x, double y) {
|
||||
std::string wkt =
|
||||
"POINT (" + std::to_string(x) + " " + std::to_string(y) + ")";
|
||||
milvus::Geometry geom(ctx_, wkt.c_str());
|
||||
return geom.to_wkb_string();
|
||||
}
|
||||
|
||||
// Helper function to create a simple polygon WKB using GEOS
|
||||
std::string
|
||||
create_polygon_wkb(const std::vector<std::pair<double, double>>& points) {
|
||||
std::string wkt = "POLYGON ((";
|
||||
for (size_t i = 0; i < points.size(); ++i) {
|
||||
if (i > 0)
|
||||
wkt += ", ";
|
||||
wkt += std::to_string(points[i].first) + " " +
|
||||
std::to_string(points[i].second);
|
||||
}
|
||||
wkt += "))";
|
||||
|
||||
milvus::Geometry geom(ctx_, wkt.c_str());
|
||||
return geom.to_wkb_string();
|
||||
}
|
||||
|
||||
std::string test_dir_;
|
||||
GEOSContextHandle_t ctx_;
|
||||
};
|
||||
|
||||
TEST_F(RTreeIndexWrapperTest, TestBuildAndLoad) {
    std::string index_path = test_dir_ + "/test_index";

    // Build phase: index three points at (1,1), (2,2) and (3,3).
    {
        milvus::index::RTreeIndexWrapper wrapper(index_path, true);

        auto p1_wkb = create_point_wkb(1.0, 1.0);
        auto p2_wkb = create_point_wkb(2.0, 2.0);
        auto p3_wkb = create_point_wkb(3.0, 3.0);

        wrapper.add_geometry(
            reinterpret_cast<const uint8_t*>(p1_wkb.data()), p1_wkb.size(), 0);
        wrapper.add_geometry(
            reinterpret_cast<const uint8_t*>(p2_wkb.data()), p2_wkb.size(), 1);
        wrapper.add_geometry(
            reinterpret_cast<const uint8_t*>(p3_wkb.data()), p3_wkb.size(), 2);

        wrapper.finish();
    }

    // Load phase: query with a box that covers only the first two points.
    {
        milvus::index::RTreeIndexWrapper wrapper(index_path, false);
        wrapper.load();

        auto query_polygon_wkb = create_polygon_wkb(
            {{0.0, 0.0}, {2.5, 0.0}, {2.5, 2.5}, {0.0, 2.5}, {0.0, 0.0}});

        milvus::Geometry query_geom(
            ctx_,
            reinterpret_cast<const void*>(query_polygon_wkb.data()),
            query_polygon_wkb.size());

        std::vector<int64_t> candidates;
        wrapper.query_candidates(
            milvus::proto::plan::GISFunctionFilterExpr_GISOp_Intersects,
            query_geom.GetGeometry(),
            ctx_,
            candidates);

        // Rows 0 and 1 intersect the query box; row 2 lies outside it.
        EXPECT_EQ(candidates.size(), 2);
        EXPECT_TRUE(std::find(candidates.begin(), candidates.end(), 0) !=
                    candidates.end());
        EXPECT_TRUE(std::find(candidates.begin(), candidates.end(), 1) !=
                    candidates.end());
        EXPECT_TRUE(std::find(candidates.begin(), candidates.end(), 2) ==
                    candidates.end());
    }
}
||||
TEST_F(RTreeIndexWrapperTest, TestQueryOperations) {
    std::string index_path = test_dir_ + "/test_query_index";

    // Build phase: one large polygon plus three points.
    {
        milvus::index::RTreeIndexWrapper wrapper(index_path, true);

        auto polygon_wkb = create_polygon_wkb(
            {{0.0, 0.0}, {10.0, 0.0}, {10.0, 10.0}, {0.0, 10.0}, {0.0, 0.0}});
        wrapper.add_geometry(
            reinterpret_cast<const uint8_t*>(polygon_wkb.data()),
            polygon_wkb.size(),
            0);

        auto inner_wkb = create_point_wkb(5.0, 5.0);     // inside polygon
        auto outer_wkb = create_point_wkb(15.0, 15.0);   // outside polygon
        auto corner_wkb = create_point_wkb(1.0, 1.0);    // inside polygon

        wrapper.add_geometry(
            reinterpret_cast<const uint8_t*>(inner_wkb.data()),
            inner_wkb.size(),
            1);
        wrapper.add_geometry(
            reinterpret_cast<const uint8_t*>(outer_wkb.data()),
            outer_wkb.size(),
            2);
        wrapper.add_geometry(
            reinterpret_cast<const uint8_t*>(corner_wkb.data()),
            corner_wkb.size(),
            3);

        wrapper.finish();
    }

    // Query phase: a small box around (5,5) that intersects the big polygon.
    {
        milvus::index::RTreeIndexWrapper wrapper(index_path, false);
        wrapper.load();

        auto query_polygon_wkb = create_polygon_wkb(
            {{4.0, 4.0}, {6.0, 4.0}, {6.0, 6.0}, {4.0, 6.0}, {4.0, 4.0}});

        milvus::Geometry query_geom(
            ctx_,
            reinterpret_cast<const void*>(query_polygon_wkb.data()),
            query_polygon_wkb.size());

        std::vector<int64_t> candidates;
        wrapper.query_candidates(
            milvus::proto::plan::GISFunctionFilterExpr_GISOp_Intersects,
            query_geom.GetGeometry(),
            ctx_,
            candidates);

        // Expect the big polygon (0) and the inner point (1); the other two
        // points fall outside the query box.
        EXPECT_EQ(candidates.size(), 2);
        EXPECT_TRUE(std::find(candidates.begin(), candidates.end(), 0) !=
                    candidates.end());
        EXPECT_TRUE(std::find(candidates.begin(), candidates.end(), 1) !=
                    candidates.end());
        EXPECT_TRUE(std::find(candidates.begin(), candidates.end(), 2) ==
                    candidates.end());
        EXPECT_TRUE(std::find(candidates.begin(), candidates.end(), 3) ==
                    candidates.end());
    }
}
||||
TEST_F(RTreeIndexWrapperTest, TestInvalidWKB) {
    std::string index_path = test_dir_ + "/test_invalid_wkb";

    milvus::index::RTreeIndexWrapper wrapper(index_path, true);

    // Four bytes of garbage that cannot be parsed as WKB.
    std::vector<uint8_t> invalid_wkb = {0x01, 0x02, 0x03, 0x04};

    // The wrapper is expected to handle unparsable rows gracefully
    // rather than crashing.
    wrapper.add_geometry(invalid_wkb.data(), invalid_wkb.size(), 0);

    wrapper.finish();
}
||||
@ -498,6 +498,7 @@ TEST(Sealed, LoadFieldData) {
|
||||
schema->AddDebugField("int16", DataType::INT16);
|
||||
schema->AddDebugField("float", DataType::FLOAT);
|
||||
schema->AddDebugField("json", DataType::JSON);
|
||||
schema->AddDebugField("geometry", DataType::GEOMETRY);
|
||||
schema->AddDebugField("array", DataType::ARRAY, DataType::INT64);
|
||||
schema->set_primary_field_id(counter_id);
|
||||
auto int8_nullable_id =
|
||||
@ -678,6 +679,7 @@ TEST(Sealed, ClearData) {
|
||||
schema->AddDebugField("int16", DataType::INT16);
|
||||
schema->AddDebugField("float", DataType::FLOAT);
|
||||
schema->AddDebugField("json", DataType::JSON);
|
||||
schema->AddDebugField("geometry", DataType::GEOMETRY);
|
||||
schema->AddDebugField("array", DataType::ARRAY, DataType::INT64);
|
||||
schema->set_primary_field_id(counter_id);
|
||||
|
||||
@ -783,6 +785,7 @@ TEST(Sealed, LoadFieldDataMmap) {
|
||||
schema->AddDebugField("int16", DataType::INT16);
|
||||
schema->AddDebugField("float", DataType::FLOAT);
|
||||
schema->AddDebugField("json", DataType::JSON);
|
||||
schema->AddDebugField("geometry", DataType::GEOMETRY);
|
||||
schema->AddDebugField("array", DataType::ARRAY, DataType::INT64);
|
||||
schema->set_primary_field_id(counter_id);
|
||||
|
||||
@ -1944,6 +1947,7 @@ TEST(Sealed, QueryAllFields) {
|
||||
auto double_field = schema->AddDebugField("double", DataType::DOUBLE);
|
||||
auto varchar_field = schema->AddDebugField("varchar", DataType::VARCHAR);
|
||||
auto json_field = schema->AddDebugField("json", DataType::JSON);
|
||||
auto geometry_field = schema->AddDebugField("geometry", DataType::GEOMETRY);
|
||||
auto int_array_field =
|
||||
schema->AddDebugField("int_array", DataType::ARRAY, DataType::INT8);
|
||||
auto long_array_field =
|
||||
@ -1995,6 +1999,7 @@ TEST(Sealed, QueryAllFields) {
|
||||
auto double_values = dataset.get_col<double>(double_field);
|
||||
auto varchar_values = dataset.get_col<std::string>(varchar_field);
|
||||
auto json_values = dataset.get_col<std::string>(json_field);
|
||||
auto geometry_values = dataset.get_col<std::string>(geometry_field);
|
||||
auto int_array_values = dataset.get_col<ScalarFieldProto>(int_array_field);
|
||||
auto long_array_values =
|
||||
dataset.get_col<ScalarFieldProto>(long_array_field);
|
||||
@ -2030,6 +2035,8 @@ TEST(Sealed, QueryAllFields) {
|
||||
nullptr, varchar_field, ids_ds->GetIds(), dataset_size);
|
||||
auto json_result = segment->bulk_subscript(
|
||||
nullptr, json_field, ids_ds->GetIds(), dataset_size);
|
||||
auto geometry_result = segment->bulk_subscript(
|
||||
nullptr, geometry_field, ids_ds->GetIds(), dataset_size);
|
||||
auto int_array_result = segment->bulk_subscript(
|
||||
nullptr, int_array_field, ids_ds->GetIds(), dataset_size);
|
||||
auto long_array_result = segment->bulk_subscript(
|
||||
@ -2061,6 +2068,8 @@ TEST(Sealed, QueryAllFields) {
|
||||
EXPECT_EQ(varchar_result->scalars().string_data().data_size(),
|
||||
dataset_size);
|
||||
EXPECT_EQ(json_result->scalars().json_data().data_size(), dataset_size);
|
||||
EXPECT_EQ(geometry_result->scalars().geometry_data().data_size(),
|
||||
dataset_size);
|
||||
EXPECT_EQ(vec_result->vectors().float_vector().data_size(),
|
||||
dataset_size * dim);
|
||||
EXPECT_EQ(float16_vec_result->vectors().float16_vector().size(),
|
||||
@ -2113,6 +2122,8 @@ TEST(Sealed, QueryAllNullableFields) {
|
||||
auto varchar_field =
|
||||
schema->AddDebugField("varchar", DataType::VARCHAR, true);
|
||||
auto json_field = schema->AddDebugField("json", DataType::JSON, true);
|
||||
auto geometry_field =
|
||||
schema->AddDebugField("geometry", DataType::GEOMETRY, true);
|
||||
auto int_array_field = schema->AddDebugField(
|
||||
"int_array", DataType::ARRAY, DataType::INT8, true);
|
||||
auto long_array_field = schema->AddDebugField(
|
||||
@ -2158,6 +2169,7 @@ TEST(Sealed, QueryAllNullableFields) {
|
||||
auto double_values = dataset.get_col<double>(double_field);
|
||||
auto varchar_values = dataset.get_col<std::string>(varchar_field);
|
||||
auto json_values = dataset.get_col<std::string>(json_field);
|
||||
auto geometry_values = dataset.get_col<std::string>(geometry_field);
|
||||
auto int_array_values = dataset.get_col<ScalarFieldProto>(int_array_field);
|
||||
auto long_array_values =
|
||||
dataset.get_col<ScalarFieldProto>(long_array_field);
|
||||
@ -2179,6 +2191,7 @@ TEST(Sealed, QueryAllNullableFields) {
|
||||
auto double_valid_values = dataset.get_col_valid(double_field);
|
||||
auto varchar_valid_values = dataset.get_col_valid(varchar_field);
|
||||
auto json_valid_values = dataset.get_col_valid(json_field);
|
||||
auto geometry_valid_values = dataset.get_col_valid(geometry_field);
|
||||
auto int_array_valid_values = dataset.get_col_valid(int_array_field);
|
||||
auto long_array_valid_values = dataset.get_col_valid(long_array_field);
|
||||
auto bool_array_valid_values = dataset.get_col_valid(bool_array_field);
|
||||
@ -2205,6 +2218,8 @@ TEST(Sealed, QueryAllNullableFields) {
|
||||
nullptr, varchar_field, ids_ds->GetIds(), dataset_size);
|
||||
auto json_result = segment->bulk_subscript(
|
||||
nullptr, json_field, ids_ds->GetIds(), dataset_size);
|
||||
auto geometry_result = segment->bulk_subscript(
|
||||
nullptr, geometry_field, ids_ds->GetIds(), dataset_size);
|
||||
auto int_array_result = segment->bulk_subscript(
|
||||
nullptr, int_array_field, ids_ds->GetIds(), dataset_size);
|
||||
auto long_array_result = segment->bulk_subscript(
|
||||
@ -2230,6 +2245,8 @@ TEST(Sealed, QueryAllNullableFields) {
|
||||
EXPECT_EQ(varchar_result->scalars().string_data().data_size(),
|
||||
dataset_size);
|
||||
EXPECT_EQ(json_result->scalars().json_data().data_size(), dataset_size);
|
||||
EXPECT_EQ(geometry_result->scalars().geometry_data().data_size(),
|
||||
dataset_size);
|
||||
EXPECT_EQ(vec_result->vectors().float_vector().data_size(),
|
||||
dataset_size * dim);
|
||||
EXPECT_EQ(int_array_result->scalars().array_data().data_size(),
|
||||
@ -2253,6 +2270,7 @@ TEST(Sealed, QueryAllNullableFields) {
|
||||
EXPECT_EQ(double_result->valid_data_size(), dataset_size);
|
||||
EXPECT_EQ(varchar_result->valid_data_size(), dataset_size);
|
||||
EXPECT_EQ(json_result->valid_data_size(), dataset_size);
|
||||
EXPECT_EQ(geometry_result->valid_data_size(), dataset_size);
|
||||
EXPECT_EQ(int_array_result->valid_data_size(), dataset_size);
|
||||
EXPECT_EQ(long_array_result->valid_data_size(), dataset_size);
|
||||
EXPECT_EQ(bool_array_result->valid_data_size(), dataset_size);
|
||||
|
||||
@ -254,6 +254,16 @@ struct GeneratedData {
|
||||
src_data.begin(), src_data.end(), ret_data);
|
||||
break;
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
auto ret_data =
|
||||
reinterpret_cast<std::string*>(ret.data());
|
||||
auto src_data = target_field_data.scalars()
|
||||
.geometry_data()
|
||||
.data();
|
||||
std::copy(
|
||||
src_data.begin(), src_data.end(), ret_data);
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
ThrowInfo(Unsupported, "unsupported");
|
||||
}
|
||||
@ -433,6 +443,95 @@ InsertCol(InsertRecordProto* insert_data,
|
||||
insert_data->mutable_fields_data()->AddAllocated(array.release());
|
||||
}
|
||||
|
||||
inline std::string
|
||||
generateRandomPoint() {
|
||||
return "POINT(" +
|
||||
std::to_string(static_cast<double>(rand()) / RAND_MAX * 360.0 -
|
||||
180.0) +
|
||||
" " +
|
||||
std::to_string(static_cast<double>(rand()) / RAND_MAX * 180.0 -
|
||||
90.0) +
|
||||
")";
|
||||
}
|
||||
|
||||
inline std::string
|
||||
generateRandomValidLineString(int numPoints) {
|
||||
// Generate a simple line string that doesn't self-intersect
|
||||
double startX = static_cast<double>(rand()) / RAND_MAX * 300.0 - 150.0;
|
||||
double startY = static_cast<double>(rand()) / RAND_MAX * 160.0 - 80.0;
|
||||
|
||||
std::string wkt = "LINESTRING (";
|
||||
wkt += std::to_string(startX) + " " + std::to_string(startY);
|
||||
|
||||
for (int i = 1; i < numPoints; ++i) {
|
||||
// Generate next point with some distance from previous point
|
||||
double deltaX = (static_cast<double>(rand()) / RAND_MAX - 0.5) * 20.0;
|
||||
double deltaY = (static_cast<double>(rand()) / RAND_MAX - 0.5) * 20.0;
|
||||
|
||||
startX += deltaX;
|
||||
startY += deltaY;
|
||||
|
||||
wkt += ", " + std::to_string(startX) + " " + std::to_string(startY);
|
||||
}
|
||||
|
||||
wkt += ")";
|
||||
return wkt;
|
||||
}
|
||||
|
||||
inline std::string
|
||||
generateRandomValidPolygon(int numPoints) {
|
||||
// Generate a simple convex polygon to avoid self-intersection
|
||||
if (numPoints < 3)
|
||||
numPoints = 3;
|
||||
|
||||
// Generate center point
|
||||
double centerX = static_cast<double>(rand()) / RAND_MAX * 300.0 - 150.0;
|
||||
double centerY = static_cast<double>(rand()) / RAND_MAX * 160.0 - 80.0;
|
||||
|
||||
// Generate radius
|
||||
double radius = 5.0 + static_cast<double>(rand()) / RAND_MAX * 15.0;
|
||||
|
||||
std::string wkt = "POLYGON ((";
|
||||
|
||||
// Generate points in a circle to form a convex polygon
|
||||
for (int i = 0; i < numPoints; ++i) {
|
||||
double angle = 2.0 * M_PI * i / numPoints;
|
||||
double x = centerX + radius * cos(angle);
|
||||
double y = centerY + radius * sin(angle);
|
||||
|
||||
if (i > 0)
|
||||
wkt += ", ";
|
||||
wkt += std::to_string(x) + " " + std::to_string(y);
|
||||
}
|
||||
|
||||
// Close the ring by repeating the first point
|
||||
double angle = 0.0;
|
||||
double x = centerX + radius * cos(angle);
|
||||
double y = centerY + radius * sin(angle);
|
||||
wkt += ", " + std::to_string(x) + " " + std::to_string(y);
|
||||
|
||||
wkt += "))";
|
||||
return wkt;
|
||||
}
|
||||
|
||||
inline std::string
|
||||
GenRandomGeometry() {
|
||||
int geomType = rand() % 3; // Randomly select a geometry type (0 to 2)
|
||||
switch (geomType) {
|
||||
case 0: {
|
||||
return generateRandomPoint();
|
||||
}
|
||||
case 1: {
|
||||
return generateRandomValidLineString(5);
|
||||
}
|
||||
case 2: {
|
||||
return generateRandomValidPolygon(5);
|
||||
}
|
||||
default:
|
||||
return generateRandomPoint();
|
||||
}
|
||||
}
|
||||
|
||||
inline GeneratedData
|
||||
DataGen(SchemaPtr schema,
|
||||
int64_t N,
|
||||
@ -795,6 +894,21 @@ DataGen(SchemaPtr schema,
|
||||
insert_cols(data, N, field_meta, random_valid);
|
||||
break;
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
vector<std::string> data(N);
|
||||
auto ctx = GEOS_init_r();
|
||||
for (int i = 0; i < N / repeat_count; i++) {
|
||||
std::string wkt = GenRandomGeometry();
|
||||
Geometry geom(ctx, wkt.c_str());
|
||||
std::string wkb = geom.to_wkb_string();
|
||||
for (int j = 0; j < repeat_count; j++) {
|
||||
data[i * repeat_count + j] = wkb;
|
||||
}
|
||||
}
|
||||
GEOS_finish_r(ctx);
|
||||
insert_cols(data, N, field_meta, random_valid);
|
||||
break;
|
||||
}
|
||||
case DataType::ARRAY: {
|
||||
vector<ScalarFieldProto> data(N);
|
||||
switch (field_meta.get_element_type()) {
|
||||
@ -1413,6 +1527,24 @@ CreateFieldDataFromDataArray(ssize_t raw_count,
|
||||
}
|
||||
break;
|
||||
}
|
||||
case DataType::GEOMETRY: {
|
||||
auto src_data = data->scalars().geometry_data().data();
|
||||
std::vector<std::string> data_raw(src_data.size());
|
||||
for (int i = 0; i < src_data.size(); i++) {
|
||||
auto str = src_data.Get(i);
|
||||
data_raw[i] = std::move(std::string(str));
|
||||
}
|
||||
if (field_meta.is_nullable()) {
|
||||
auto raw_valid_data = data->valid_data().data();
|
||||
createNullableFieldData(data_raw.data(),
|
||||
raw_valid_data,
|
||||
DataType::GEOMETRY,
|
||||
dim);
|
||||
} else {
|
||||
createFieldData(data_raw.data(), DataType::GEOMETRY, dim);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case DataType::ARRAY: {
|
||||
auto src_data = data->scalars().array_data().data();
|
||||
std::vector<Array> data_raw(src_data.size());
|
||||
|
||||
@ -562,6 +562,8 @@ func checkAndSetData(body []byte, collSchema *schemapb.CollectionSchema, partial
|
||||
}
|
||||
case schemapb.DataType_JSON:
|
||||
reallyData[fieldName] = []byte(dataString)
|
||||
case schemapb.DataType_Geometry:
|
||||
reallyData[fieldName] = dataString
|
||||
case schemapb.DataType_Float:
|
||||
result, err := cast.ToFloat32E(dataString)
|
||||
if err != nil {
|
||||
@ -785,6 +787,8 @@ func anyToColumns(rows []map[string]interface{}, validDataMap map[string][]bool,
|
||||
data = make([]*schemapb.ScalarField, 0, rowsLen)
|
||||
case schemapb.DataType_JSON:
|
||||
data = make([][]byte, 0, rowsLen)
|
||||
case schemapb.DataType_Geometry:
|
||||
data = make([]string, 0, rowsLen)
|
||||
case schemapb.DataType_FloatVector:
|
||||
data = make([][]float32, 0, rowsLen)
|
||||
dim, _ := getDim(field)
|
||||
@ -885,6 +889,8 @@ func anyToColumns(rows []map[string]interface{}, validDataMap map[string][]bool,
|
||||
nameColumns[field.Name] = append(nameColumns[field.Name].([]*schemapb.ScalarField), candi.v.Interface().(*schemapb.ScalarField))
|
||||
case schemapb.DataType_JSON:
|
||||
nameColumns[field.Name] = append(nameColumns[field.Name].([][]byte), candi.v.Interface().([]byte))
|
||||
case schemapb.DataType_Geometry:
|
||||
nameColumns[field.Name] = append(nameColumns[field.Name].([]string), candi.v.Interface().(string))
|
||||
case schemapb.DataType_FloatVector:
|
||||
nameColumns[field.Name] = append(nameColumns[field.Name].([][]float32), candi.v.Interface().([]float32))
|
||||
case schemapb.DataType_BinaryVector:
|
||||
@ -1077,6 +1083,16 @@ func anyToColumns(rows []map[string]interface{}, validDataMap map[string][]bool,
|
||||
},
|
||||
},
|
||||
}
|
||||
case schemapb.DataType_Geometry:
|
||||
colData.Field = &schemapb.FieldData_Scalars{
|
||||
Scalars: &schemapb.ScalarField{
|
||||
Data: &schemapb.ScalarField_GeometryWktData{
|
||||
GeometryWktData: &schemapb.GeometryWktArray{
|
||||
Data: column.([]string),
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
case schemapb.DataType_FloatVector:
|
||||
dim := nameDims[name]
|
||||
arr, err := convertFloatVectorToArray(column.([][]float32), dim)
|
||||
@ -1382,6 +1398,8 @@ func buildQueryResp(rowsNum int64, needFields []string, fieldDataList []*schemap
|
||||
rowsNum = int64(len(fieldDataList[0].GetScalars().GetArrayData().GetData()))
|
||||
case schemapb.DataType_JSON:
|
||||
rowsNum = int64(len(fieldDataList[0].GetScalars().GetJsonData().GetData()))
|
||||
case schemapb.DataType_Geometry:
|
||||
rowsNum = int64(len(fieldDataList[0].GetScalars().GetGeometryWktData().Data))
|
||||
case schemapb.DataType_BinaryVector:
|
||||
rowsNum = int64(len(fieldDataList[0].GetVectors().GetBinaryVector())*8) / fieldDataList[0].GetVectors().GetDim()
|
||||
case schemapb.DataType_FloatVector:
|
||||
@ -1542,6 +1560,12 @@ func buildQueryResp(rowsNum int64, needFields []string, fieldDataList []*schemap
|
||||
}
|
||||
}
|
||||
}
|
||||
case schemapb.DataType_Geometry:
|
||||
if len(fieldDataList[j].ValidData) != 0 && !fieldDataList[j].ValidData[i] {
|
||||
row[fieldDataList[j].FieldName] = nil
|
||||
continue
|
||||
}
|
||||
row[fieldDataList[j].FieldName] = fieldDataList[j].GetScalars().GetGeometryWktData().Data[i]
|
||||
default:
|
||||
row[fieldDataList[j].GetFieldName()] = ""
|
||||
}
|
||||
|
||||
@ -1377,6 +1377,12 @@ func compareRow(m1 map[string]interface{}, m2 map[string]interface{}) bool {
|
||||
if arr1 != string(arr2) {
|
||||
return false
|
||||
}
|
||||
} else if key == "field-geometry" {
|
||||
arr1 := value.(string)
|
||||
arr2 := m2[key].(string)
|
||||
if arr2 != arr1 {
|
||||
return false
|
||||
}
|
||||
} else if strings.HasPrefix(key, "array-") {
|
||||
continue
|
||||
} else if value != m2[key] {
|
||||
@ -1385,7 +1391,7 @@ func compareRow(m1 map[string]interface{}, m2 map[string]interface{}) bool {
|
||||
}
|
||||
|
||||
for key, value := range m2 {
|
||||
if (key == FieldBookIntro) || (key == "field-json") || (key == "field-array") {
|
||||
if (key == FieldBookIntro) || (key == "field-json") || (key == "field-geometry") || (key == "field-array") {
|
||||
continue
|
||||
} else if strings.HasPrefix(key, "array-") {
|
||||
continue
|
||||
@ -1485,6 +1491,12 @@ func newCollectionSchema(coll *schemapb.CollectionSchema) *schemapb.CollectionSc
|
||||
}
|
||||
coll.Fields = append(coll.Fields, &fieldSchema10)
|
||||
|
||||
fieldSchema11 := schemapb.FieldSchema{
|
||||
Name: "field-geometry",
|
||||
DataType: schemapb.DataType_Geometry,
|
||||
IsDynamic: false,
|
||||
}
|
||||
coll.Fields = append(coll.Fields, &fieldSchema11)
|
||||
return coll
|
||||
}
|
||||
|
||||
@ -1731,6 +1743,27 @@ func newFieldData(fieldDatas []*schemapb.FieldData, firstFieldType schemapb.Data
|
||||
}
|
||||
fieldDatas = append(fieldDatas, &fieldData11)
|
||||
|
||||
fieldData12 := schemapb.FieldData{
|
||||
Type: schemapb.DataType_Geometry,
|
||||
FieldName: "field-geometry",
|
||||
Field: &schemapb.FieldData_Scalars{
|
||||
Scalars: &schemapb.ScalarField{
|
||||
Data: &schemapb.ScalarField_GeometryWktData{
|
||||
GeometryWktData: &schemapb.GeometryWktArray{
|
||||
Data: []string{
|
||||
`POINT (30.123 -10.456)`,
|
||||
`POINT (30.123 -10.456)`,
|
||||
`POINT (30.123 -10.456)`,
|
||||
// wkb:{0x01, 0x01, 0x00, 0x00, 0x00, 0xD2, 0x4A, 0x4D, 0x6A, 0x8B, 0x3C, 0x5C, 0x0A, 0x0D, 0x1B, 0x4F, 0x4F, 0x9A, 0x3D, 0x4},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
IsDynamic: false,
|
||||
}
|
||||
fieldDatas = append(fieldDatas, &fieldData12)
|
||||
|
||||
switch firstFieldType {
|
||||
case schemapb.DataType_None:
|
||||
return fieldDatas
|
||||
@ -1769,6 +1802,8 @@ func newFieldData(fieldDatas []*schemapb.FieldData, firstFieldType schemapb.Data
|
||||
return []*schemapb.FieldData{&fieldData10}
|
||||
case schemapb.DataType_JSON:
|
||||
return []*schemapb.FieldData{&fieldData9}
|
||||
case schemapb.DataType_Geometry:
|
||||
return []*schemapb.FieldData{&fieldData12}
|
||||
case schemapb.DataType_SparseFloatVector:
|
||||
vectorField := generateVectorFieldData(firstFieldType)
|
||||
return []*schemapb.FieldData{&vectorField}
|
||||
@ -2037,6 +2072,7 @@ func newSearchResult(results []map[string]interface{}) []map[string]interface{}
|
||||
result["field-varchar"] = strconv.Itoa(i)
|
||||
result["field-string"] = strconv.Itoa(i)
|
||||
result["field-json"] = []byte(`{"XXX": 0}`)
|
||||
result["field-geometry"] = `POINT (30.123 -10.456)`
|
||||
result["field-array"] = []bool{true}
|
||||
result["array-bool"] = []bool{true}
|
||||
result["array-int8"] = []int32{0}
|
||||
@ -2338,6 +2374,7 @@ func TestBuildQueryResps(t *testing.T) {
|
||||
schemapb.DataType_Float, schemapb.DataType_Double,
|
||||
schemapb.DataType_String, schemapb.DataType_VarChar,
|
||||
schemapb.DataType_JSON, schemapb.DataType_Array,
|
||||
schemapb.DataType_Geometry,
|
||||
}
|
||||
for _, dateType := range dataTypes {
|
||||
_, err := buildQueryResp(int64(0), outputFields, newFieldData([]*schemapb.FieldData{}, dateType), generateIDs(schemapb.DataType_Int64, 3), DefaultScores, true, nil)
|
||||
|
||||
@ -27,6 +27,14 @@ expr:
|
||||
| (JSONContains | ArrayContains)'('expr',' expr')' # JSONContains
|
||||
| (JSONContainsAll | ArrayContainsAll)'('expr',' expr')' # JSONContainsAll
|
||||
| (JSONContainsAny | ArrayContainsAny)'('expr',' expr')' # JSONContainsAny
|
||||
| STEuqals'('Identifier','StringLiteral')' # STEuqals
|
||||
| STTouches'('Identifier','StringLiteral')' # STTouches
|
||||
| STOverlaps'('Identifier','StringLiteral')' # STOverlaps
|
||||
| STCrosses'('Identifier','StringLiteral')' # STCrosses
|
||||
| STContains'('Identifier','StringLiteral')' # STContains
|
||||
| STIntersects'('Identifier','StringLiteral')' # STIntersects
|
||||
| STWithin'('Identifier','StringLiteral')' # STWithin
|
||||
| STDWithin'('Identifier','StringLiteral',' expr')' # STDWithin
|
||||
| ArrayLength'('(Identifier | JSONIdentifier)')' # ArrayLength
|
||||
| Identifier '(' ( expr (',' expr )* ','? )? ')' # Call
|
||||
| expr op1 = (LT | LE) (Identifier | JSONIdentifier) op2 = (LT | LE) expr # Range
|
||||
@ -101,6 +109,15 @@ ArrayContainsAll: 'array_contains_all' | 'ARRAY_CONTAINS_ALL';
|
||||
ArrayContainsAny: 'array_contains_any' | 'ARRAY_CONTAINS_ANY';
|
||||
ArrayLength: 'array_length' | 'ARRAY_LENGTH';
|
||||
|
||||
STEuqals:'st_equals' | 'ST_EQUALS';
|
||||
STTouches:'st_touches' | 'ST_TOUCHES';
|
||||
STOverlaps: 'st_overlaps' | 'ST_OVERLAPS';
|
||||
STCrosses: 'st_crosses' | 'ST_CROSSES';
|
||||
STContains: 'st_contains' | 'ST_CONTAINS';
|
||||
STIntersects : 'st_intersects' | 'ST_INTERSECTS';
|
||||
STWithin :'st_within' | 'ST_WITHIN';
|
||||
STDWithin: 'st_dwithin' | 'ST_DWITHIN';
|
||||
|
||||
BooleanConstant: 'true' | 'True' | 'TRUE' | 'false' | 'False' | 'FALSE';
|
||||
|
||||
IntegerConstant:
|
||||
|
||||
File diff suppressed because one or more lines are too long
@ -44,15 +44,23 @@ ArrayContains=43
|
||||
ArrayContainsAll=44
|
||||
ArrayContainsAny=45
|
||||
ArrayLength=46
|
||||
BooleanConstant=47
|
||||
IntegerConstant=48
|
||||
FloatingConstant=49
|
||||
Identifier=50
|
||||
Meta=51
|
||||
StringLiteral=52
|
||||
JSONIdentifier=53
|
||||
Whitespace=54
|
||||
Newline=55
|
||||
STEuqals=47
|
||||
STTouches=48
|
||||
STOverlaps=49
|
||||
STCrosses=50
|
||||
STContains=51
|
||||
STIntersects=52
|
||||
STWithin=53
|
||||
STDWithin=54
|
||||
BooleanConstant=55
|
||||
IntegerConstant=56
|
||||
FloatingConstant=57
|
||||
Identifier=58
|
||||
Meta=59
|
||||
StringLiteral=60
|
||||
JSONIdentifier=61
|
||||
Whitespace=62
|
||||
Newline=63
|
||||
'('=1
|
||||
')'=2
|
||||
'['=3
|
||||
@ -78,4 +86,4 @@ Newline=55
|
||||
'|'=30
|
||||
'^'=31
|
||||
'~'=36
|
||||
'$meta'=51
|
||||
'$meta'=59
|
||||
|
||||
File diff suppressed because one or more lines are too long
@ -44,15 +44,23 @@ ArrayContains=43
|
||||
ArrayContainsAll=44
|
||||
ArrayContainsAny=45
|
||||
ArrayLength=46
|
||||
BooleanConstant=47
|
||||
IntegerConstant=48
|
||||
FloatingConstant=49
|
||||
Identifier=50
|
||||
Meta=51
|
||||
StringLiteral=52
|
||||
JSONIdentifier=53
|
||||
Whitespace=54
|
||||
Newline=55
|
||||
STEuqals=47
|
||||
STTouches=48
|
||||
STOverlaps=49
|
||||
STCrosses=50
|
||||
STContains=51
|
||||
STIntersects=52
|
||||
STWithin=53
|
||||
STDWithin=54
|
||||
BooleanConstant=55
|
||||
IntegerConstant=56
|
||||
FloatingConstant=57
|
||||
Identifier=58
|
||||
Meta=59
|
||||
StringLiteral=60
|
||||
JSONIdentifier=61
|
||||
Whitespace=62
|
||||
Newline=63
|
||||
'('=1
|
||||
')'=2
|
||||
'['=3
|
||||
@ -78,4 +86,4 @@ Newline=55
|
||||
'|'=30
|
||||
'^'=31
|
||||
'~'=36
|
||||
'$meta'=51
|
||||
'$meta'=59
|
||||
|
||||
@ -47,6 +47,10 @@ func (v *BasePlanVisitor) VisitIdentifier(ctx *IdentifierContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
func (v *BasePlanVisitor) VisitSTIntersects(ctx *STIntersectsContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
func (v *BasePlanVisitor) VisitLike(ctx *LikeContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
@ -67,6 +71,10 @@ func (v *BasePlanVisitor) VisitBoolean(ctx *BooleanContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
func (v *BasePlanVisitor) VisitSTDWithin(ctx *STDWithinContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
func (v *BasePlanVisitor) VisitShift(ctx *ShiftContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
@ -75,6 +83,10 @@ func (v *BasePlanVisitor) VisitCall(ctx *CallContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
func (v *BasePlanVisitor) VisitSTCrosses(ctx *STCrossesContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
func (v *BasePlanVisitor) VisitReverseRange(ctx *ReverseRangeContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
@ -107,6 +119,14 @@ func (v *BasePlanVisitor) VisitTextMatch(ctx *TextMatchContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
func (v *BasePlanVisitor) VisitSTTouches(ctx *STTouchesContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
func (v *BasePlanVisitor) VisitSTContains(ctx *STContainsContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
func (v *BasePlanVisitor) VisitTerm(ctx *TermContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
@ -115,6 +135,10 @@ func (v *BasePlanVisitor) VisitJSONContains(ctx *JSONContainsContext) interface{
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
func (v *BasePlanVisitor) VisitSTWithin(ctx *STWithinContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
func (v *BasePlanVisitor) VisitRange(ctx *RangeContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
@ -151,6 +175,10 @@ func (v *BasePlanVisitor) VisitBitAnd(ctx *BitAndContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
func (v *BasePlanVisitor) VisitSTEuqals(ctx *STEuqalsContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
func (v *BasePlanVisitor) VisitIsNull(ctx *IsNullContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
@ -158,3 +186,7 @@ func (v *BasePlanVisitor) VisitIsNull(ctx *IsNullContext) interface{} {
|
||||
func (v *BasePlanVisitor) VisitPower(ctx *PowerContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
func (v *BasePlanVisitor) VisitSTOverlaps(ctx *STOverlapsContext) interface{} {
|
||||
return v.VisitChildren(ctx)
|
||||
}
|
||||
|
||||
@ -47,7 +47,7 @@ func planlexerLexerInit() {
|
||||
"'>'", "'>='", "'=='", "'!='", "", "", "", "", "", "", "", "'+'", "'-'",
|
||||
"'*'", "'/'", "'%'", "'**'", "'<<'", "'>>'", "'&'", "'|'", "'^'", "",
|
||||
"", "", "", "'~'", "", "", "", "", "", "", "", "", "", "", "", "", "",
|
||||
"", "'$meta'",
|
||||
"", "", "", "", "", "", "", "", "", "'$meta'",
|
||||
}
|
||||
staticData.SymbolicNames = []string{
|
||||
"", "", "", "", "", "", "LBRACE", "RBRACE", "LT", "LE", "GT", "GE",
|
||||
@ -56,8 +56,10 @@ func planlexerLexerInit() {
|
||||
"SHR", "BAND", "BOR", "BXOR", "AND", "OR", "ISNULL", "ISNOTNULL", "BNOT",
|
||||
"NOT", "IN", "EmptyArray", "JSONContains", "JSONContainsAll", "JSONContainsAny",
|
||||
"ArrayContains", "ArrayContainsAll", "ArrayContainsAny", "ArrayLength",
|
||||
"BooleanConstant", "IntegerConstant", "FloatingConstant", "Identifier",
|
||||
"Meta", "StringLiteral", "JSONIdentifier", "Whitespace", "Newline",
|
||||
"STEuqals", "STTouches", "STOverlaps", "STCrosses", "STContains", "STIntersects",
|
||||
"STWithin", "STDWithin", "BooleanConstant", "IntegerConstant", "FloatingConstant",
|
||||
"Identifier", "Meta", "StringLiteral", "JSONIdentifier", "Whitespace",
|
||||
"Newline",
|
||||
}
|
||||
staticData.RuleNames = []string{
|
||||
"T__0", "T__1", "T__2", "T__3", "T__4", "LBRACE", "RBRACE", "LT", "LE",
|
||||
@ -66,434 +68,527 @@ func planlexerLexerInit() {
|
||||
"POW", "SHL", "SHR", "BAND", "BOR", "BXOR", "AND", "OR", "ISNULL", "ISNOTNULL",
|
||||
"BNOT", "NOT", "IN", "EmptyArray", "JSONContains", "JSONContainsAll",
|
||||
"JSONContainsAny", "ArrayContains", "ArrayContainsAll", "ArrayContainsAny",
|
||||
"ArrayLength", "BooleanConstant", "IntegerConstant", "FloatingConstant",
|
||||
"Identifier", "Meta", "StringLiteral", "JSONIdentifier", "EncodingPrefix",
|
||||
"DoubleSCharSequence", "SingleSCharSequence", "DoubleSChar", "SingleSChar",
|
||||
"Nondigit", "Digit", "BinaryConstant", "DecimalConstant", "OctalConstant",
|
||||
"HexadecimalConstant", "NonzeroDigit", "OctalDigit", "HexadecimalDigit",
|
||||
"HexQuad", "UniversalCharacterName", "DecimalFloatingConstant", "HexadecimalFloatingConstant",
|
||||
"FractionalConstant", "ExponentPart", "DigitSequence", "HexadecimalFractionalConstant",
|
||||
"HexadecimalDigitSequence", "BinaryExponentPart", "EscapeSequence",
|
||||
"Whitespace", "Newline",
|
||||
"ArrayLength", "STEuqals", "STTouches", "STOverlaps", "STCrosses", "STContains",
|
||||
"STIntersects", "STWithin", "STDWithin", "BooleanConstant", "IntegerConstant",
|
||||
"FloatingConstant", "Identifier", "Meta", "StringLiteral", "JSONIdentifier",
|
||||
"EncodingPrefix", "DoubleSCharSequence", "SingleSCharSequence", "DoubleSChar",
|
||||
"SingleSChar", "Nondigit", "Digit", "BinaryConstant", "DecimalConstant",
|
||||
"OctalConstant", "HexadecimalConstant", "NonzeroDigit", "OctalDigit",
|
||||
"HexadecimalDigit", "HexQuad", "UniversalCharacterName", "DecimalFloatingConstant",
|
||||
"HexadecimalFloatingConstant", "FractionalConstant", "ExponentPart",
|
||||
"DigitSequence", "HexadecimalFractionalConstant", "HexadecimalDigitSequence",
|
||||
"BinaryExponentPart", "EscapeSequence", "Whitespace", "Newline",
|
||||
}
|
||||
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
|
||||
staticData.serializedATN = []int32{
|
||||
4, 0, 55, 922, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
|
||||
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2,
|
||||
10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15,
|
||||
7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7,
|
||||
20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25,
|
||||
2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2,
|
||||
31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36,
|
||||
7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7,
|
||||
41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46,
|
||||
2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2,
|
||||
52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57,
|
||||
7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7,
|
||||
62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67,
|
||||
2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2,
|
||||
73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78,
|
||||
7, 78, 2, 79, 7, 79, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1,
|
||||
4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 9, 1,
|
||||
9, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13,
|
||||
1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 200, 8, 13, 1,
|
||||
14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14,
|
||||
1, 14, 3, 14, 214, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1,
|
||||
4, 0, 63, 1120, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3,
|
||||
2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9,
|
||||
2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2,
|
||||
15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20,
|
||||
7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7,
|
||||
25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30,
|
||||
2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2,
|
||||
36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41,
|
||||
7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7,
|
||||
46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51,
|
||||
2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2,
|
||||
57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62,
|
||||
7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7,
|
||||
67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72,
|
||||
2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2,
|
||||
78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83,
|
||||
7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 1, 0, 1,
|
||||
0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1,
|
||||
6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 11,
|
||||
1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1,
|
||||
13, 1, 13, 1, 13, 3, 13, 216, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14,
|
||||
1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 230, 8, 14, 1,
|
||||
15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15,
|
||||
1, 15, 1, 15, 1, 15, 3, 15, 236, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1,
|
||||
1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 252,
|
||||
8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1,
|
||||
16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16,
|
||||
1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 3, 16, 262,
|
||||
8, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1,
|
||||
1, 16, 1, 16, 1, 16, 1, 16, 3, 16, 278, 8, 16, 1, 17, 1, 17, 1, 17, 1,
|
||||
17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17,
|
||||
1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 290, 8, 17, 1, 18, 1,
|
||||
18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18,
|
||||
1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 308, 8, 18, 1, 19, 1, 19, 1, 19, 1,
|
||||
19, 1, 19, 1, 19, 3, 19, 316, 8, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22,
|
||||
1, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1,
|
||||
26, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31,
|
||||
1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 3, 31, 351, 8, 31, 1,
|
||||
32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 359, 8, 32, 1, 33, 1, 33,
|
||||
1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1,
|
||||
33, 1, 33, 3, 33, 375, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34,
|
||||
1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1,
|
||||
17, 1, 17, 3, 17, 306, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18,
|
||||
1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3,
|
||||
18, 324, 8, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 332, 8,
|
||||
19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 24, 1, 24,
|
||||
1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 28, 1,
|
||||
28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31,
|
||||
1, 31, 1, 31, 3, 31, 367, 8, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1,
|
||||
32, 3, 32, 375, 8, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33,
|
||||
1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 3, 33, 391, 8, 33, 1,
|
||||
34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34,
|
||||
1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1,
|
||||
34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 399, 8, 34, 1, 35, 1, 35,
|
||||
1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 410, 8, 36, 1,
|
||||
37, 1, 37, 1, 37, 1, 37, 3, 37, 416, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38,
|
||||
421, 8, 38, 10, 38, 12, 38, 424, 9, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1,
|
||||
39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39,
|
||||
34, 3, 34, 415, 8, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36,
|
||||
1, 36, 1, 36, 3, 36, 426, 8, 36, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 432,
|
||||
8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 437, 8, 38, 10, 38, 12, 38, 440, 9,
|
||||
38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39,
|
||||
1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1,
|
||||
39, 1, 39, 1, 39, 3, 39, 454, 8, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40,
|
||||
39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 470, 8, 39,
|
||||
1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1,
|
||||
40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40,
|
||||
1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 3, 40, 490, 8,
|
||||
40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41,
|
||||
1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1,
|
||||
40, 1, 40, 1, 40, 3, 40, 506, 8, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41,
|
||||
1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1,
|
||||
41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41,
|
||||
1, 41, 1, 41, 1, 41, 3, 41, 526, 8, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1,
|
||||
42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42,
|
||||
1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 3, 41, 542, 8,
|
||||
41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42,
|
||||
1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1,
|
||||
42, 1, 42, 1, 42, 3, 42, 556, 8, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43,
|
||||
42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 572, 8, 42,
|
||||
1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1,
|
||||
43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43,
|
||||
1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 3,
|
||||
43, 594, 8, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44,
|
||||
1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1,
|
||||
43, 1, 43, 1, 43, 1, 43, 1, 43, 3, 43, 610, 8, 43, 1, 44, 1, 44, 1, 44,
|
||||
1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1,
|
||||
44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44,
|
||||
1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 632, 8, 44, 1,
|
||||
45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45,
|
||||
1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1,
|
||||
44, 1, 44, 3, 44, 648, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45,
|
||||
1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1,
|
||||
45, 1, 45, 1, 45, 3, 45, 658, 8, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46,
|
||||
45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 3, 45, 674, 8, 45,
|
||||
1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1,
|
||||
46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46,
|
||||
1, 46, 3, 46, 687, 8, 46, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 693, 8, 47,
|
||||
1, 48, 1, 48, 3, 48, 697, 8, 48, 1, 49, 1, 49, 1, 49, 5, 49, 702, 8, 49,
|
||||
10, 49, 12, 49, 705, 9, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1,
|
||||
51, 3, 51, 714, 8, 51, 1, 51, 1, 51, 3, 51, 718, 8, 51, 1, 51, 1, 51, 1,
|
||||
51, 3, 51, 723, 8, 51, 1, 51, 3, 51, 726, 8, 51, 1, 52, 1, 52, 3, 52, 730,
|
||||
8, 52, 1, 52, 1, 52, 1, 52, 3, 52, 735, 8, 52, 1, 52, 1, 52, 4, 52, 739,
|
||||
8, 52, 11, 52, 12, 52, 740, 1, 53, 1, 53, 1, 53, 3, 53, 746, 8, 53, 1,
|
||||
54, 4, 54, 749, 8, 54, 11, 54, 12, 54, 750, 1, 55, 4, 55, 754, 8, 55, 11,
|
||||
55, 12, 55, 755, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 3, 56,
|
||||
765, 8, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 3, 57, 774,
|
||||
8, 57, 1, 58, 1, 58, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 4, 60, 783, 8,
|
||||
60, 11, 60, 12, 60, 784, 1, 61, 1, 61, 5, 61, 789, 8, 61, 10, 61, 12, 61,
|
||||
792, 9, 61, 1, 61, 3, 61, 795, 8, 61, 1, 62, 1, 62, 5, 62, 799, 8, 62,
|
||||
10, 62, 12, 62, 802, 9, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1,
|
||||
65, 1, 65, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68,
|
||||
1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 3, 68, 829, 8,
|
||||
68, 1, 69, 1, 69, 3, 69, 833, 8, 69, 1, 69, 1, 69, 1, 69, 3, 69, 838, 8,
|
||||
69, 1, 70, 1, 70, 1, 70, 1, 70, 3, 70, 844, 8, 70, 1, 70, 1, 70, 1, 71,
|
||||
3, 71, 849, 8, 71, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 3, 71, 856, 8, 71,
|
||||
1, 72, 1, 72, 3, 72, 860, 8, 72, 1, 72, 1, 72, 1, 73, 4, 73, 865, 8, 73,
|
||||
11, 73, 12, 73, 866, 1, 74, 3, 74, 870, 8, 74, 1, 74, 1, 74, 1, 74, 1,
|
||||
74, 1, 74, 3, 74, 877, 8, 74, 1, 75, 4, 75, 880, 8, 75, 11, 75, 12, 75,
|
||||
881, 1, 76, 1, 76, 3, 76, 886, 8, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77,
|
||||
1, 77, 1, 77, 3, 77, 895, 8, 77, 1, 77, 3, 77, 898, 8, 77, 1, 77, 1, 77,
|
||||
1, 77, 1, 77, 1, 77, 3, 77, 905, 8, 77, 1, 78, 4, 78, 908, 8, 78, 11, 78,
|
||||
12, 78, 909, 1, 78, 1, 78, 1, 79, 1, 79, 3, 79, 916, 8, 79, 1, 79, 3, 79,
|
||||
919, 8, 79, 1, 79, 1, 79, 0, 0, 80, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6,
|
||||
13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31,
|
||||
16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49,
|
||||
25, 51, 26, 53, 27, 55, 28, 57, 29, 59, 30, 61, 31, 63, 32, 65, 33, 67,
|
||||
34, 69, 35, 71, 36, 73, 37, 75, 38, 77, 39, 79, 40, 81, 41, 83, 42, 85,
|
||||
43, 87, 44, 89, 45, 91, 46, 93, 47, 95, 48, 97, 49, 99, 50, 101, 51, 103,
|
||||
52, 105, 53, 107, 0, 109, 0, 111, 0, 113, 0, 115, 0, 117, 0, 119, 0, 121,
|
||||
0, 123, 0, 125, 0, 127, 0, 129, 0, 131, 0, 133, 0, 135, 0, 137, 0, 139,
|
||||
46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 3, 46, 694, 8, 46,
|
||||
1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1,
|
||||
47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47,
|
||||
716, 8, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1,
|
||||
48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48,
|
||||
1, 48, 1, 48, 1, 48, 3, 48, 740, 8, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1,
|
||||
49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49,
|
||||
1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 3, 49, 762, 8, 49, 1, 50, 1, 50, 1,
|
||||
50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50,
|
||||
1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 3, 50, 786,
|
||||
8, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1,
|
||||
51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51,
|
||||
1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 3, 51, 814, 8, 51, 1, 52, 1,
|
||||
52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52,
|
||||
1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 834, 8, 52, 1, 53, 1,
|
||||
53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53,
|
||||
1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 3, 53, 856, 8,
|
||||
53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54,
|
||||
1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1,
|
||||
54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 3, 54, 885, 8, 54, 1, 55,
|
||||
1, 55, 1, 55, 1, 55, 3, 55, 891, 8, 55, 1, 56, 1, 56, 3, 56, 895, 8, 56,
|
||||
1, 57, 1, 57, 1, 57, 5, 57, 900, 8, 57, 10, 57, 12, 57, 903, 9, 57, 1,
|
||||
58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 3, 59, 912, 8, 59, 1, 59,
|
||||
1, 59, 3, 59, 916, 8, 59, 1, 59, 1, 59, 1, 59, 3, 59, 921, 8, 59, 1, 59,
|
||||
3, 59, 924, 8, 59, 1, 60, 1, 60, 3, 60, 928, 8, 60, 1, 60, 1, 60, 1, 60,
|
||||
3, 60, 933, 8, 60, 1, 60, 1, 60, 4, 60, 937, 8, 60, 11, 60, 12, 60, 938,
|
||||
1, 61, 1, 61, 1, 61, 3, 61, 944, 8, 61, 1, 62, 4, 62, 947, 8, 62, 11, 62,
|
||||
12, 62, 948, 1, 63, 4, 63, 952, 8, 63, 11, 63, 12, 63, 953, 1, 64, 1, 64,
|
||||
1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 3, 64, 963, 8, 64, 1, 65, 1, 65, 1,
|
||||
65, 1, 65, 1, 65, 1, 65, 1, 65, 3, 65, 972, 8, 65, 1, 66, 1, 66, 1, 67,
|
||||
1, 67, 1, 68, 1, 68, 1, 68, 4, 68, 981, 8, 68, 11, 68, 12, 68, 982, 1,
|
||||
69, 1, 69, 5, 69, 987, 8, 69, 10, 69, 12, 69, 990, 9, 69, 1, 69, 3, 69,
|
||||
993, 8, 69, 1, 70, 1, 70, 5, 70, 997, 8, 70, 10, 70, 12, 70, 1000, 9, 70,
|
||||
1, 71, 1, 71, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1,
|
||||
75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76,
|
||||
1, 76, 1, 76, 1, 76, 1, 76, 3, 76, 1027, 8, 76, 1, 77, 1, 77, 3, 77, 1031,
|
||||
8, 77, 1, 77, 1, 77, 1, 77, 3, 77, 1036, 8, 77, 1, 78, 1, 78, 1, 78, 1,
|
||||
78, 3, 78, 1042, 8, 78, 1, 78, 1, 78, 1, 79, 3, 79, 1047, 8, 79, 1, 79,
|
||||
1, 79, 1, 79, 1, 79, 1, 79, 3, 79, 1054, 8, 79, 1, 80, 1, 80, 3, 80, 1058,
|
||||
8, 80, 1, 80, 1, 80, 1, 81, 4, 81, 1063, 8, 81, 11, 81, 12, 81, 1064, 1,
|
||||
82, 3, 82, 1068, 8, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 3, 82, 1075,
|
||||
8, 82, 1, 83, 4, 83, 1078, 8, 83, 11, 83, 12, 83, 1079, 1, 84, 1, 84, 3,
|
||||
84, 1084, 8, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 85, 3, 85,
|
||||
1093, 8, 85, 1, 85, 3, 85, 1096, 8, 85, 1, 85, 1, 85, 1, 85, 1, 85, 1,
|
||||
85, 3, 85, 1103, 8, 85, 1, 86, 4, 86, 1106, 8, 86, 11, 86, 12, 86, 1107,
|
||||
1, 86, 1, 86, 1, 87, 1, 87, 3, 87, 1114, 8, 87, 1, 87, 3, 87, 1117, 8,
|
||||
87, 1, 87, 1, 87, 0, 0, 88, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7,
|
||||
15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33,
|
||||
17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51,
|
||||
26, 53, 27, 55, 28, 57, 29, 59, 30, 61, 31, 63, 32, 65, 33, 67, 34, 69,
|
||||
35, 71, 36, 73, 37, 75, 38, 77, 39, 79, 40, 81, 41, 83, 42, 85, 43, 87,
|
||||
44, 89, 45, 91, 46, 93, 47, 95, 48, 97, 49, 99, 50, 101, 51, 103, 52, 105,
|
||||
53, 107, 54, 109, 55, 111, 56, 113, 57, 115, 58, 117, 59, 119, 60, 121,
|
||||
61, 123, 0, 125, 0, 127, 0, 129, 0, 131, 0, 133, 0, 135, 0, 137, 0, 139,
|
||||
0, 141, 0, 143, 0, 145, 0, 147, 0, 149, 0, 151, 0, 153, 0, 155, 0, 157,
|
||||
54, 159, 55, 1, 0, 16, 3, 0, 76, 76, 85, 85, 117, 117, 4, 0, 10, 10, 13,
|
||||
13, 34, 34, 92, 92, 4, 0, 10, 10, 13, 13, 39, 39, 92, 92, 3, 0, 65, 90,
|
||||
95, 95, 97, 122, 1, 0, 48, 57, 2, 0, 66, 66, 98, 98, 1, 0, 48, 49, 2, 0,
|
||||
88, 88, 120, 120, 1, 0, 49, 57, 1, 0, 48, 55, 3, 0, 48, 57, 65, 70, 97,
|
||||
102, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 2, 0, 80, 80, 112, 112,
|
||||
10, 0, 34, 34, 39, 39, 63, 63, 92, 92, 97, 98, 102, 102, 110, 110, 114,
|
||||
114, 116, 116, 118, 118, 2, 0, 9, 9, 32, 32, 972, 0, 1, 1, 0, 0, 0, 0,
|
||||
3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0,
|
||||
11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0,
|
||||
0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0,
|
||||
0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0,
|
||||
0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1,
|
||||
0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49,
|
||||
1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0,
|
||||
57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 63, 1, 0, 0, 0,
|
||||
0, 65, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0,
|
||||
0, 0, 73, 1, 0, 0, 0, 0, 75, 1, 0, 0, 0, 0, 77, 1, 0, 0, 0, 0, 79, 1, 0,
|
||||
0, 0, 0, 81, 1, 0, 0, 0, 0, 83, 1, 0, 0, 0, 0, 85, 1, 0, 0, 0, 0, 87, 1,
|
||||
0, 0, 0, 0, 89, 1, 0, 0, 0, 0, 91, 1, 0, 0, 0, 0, 93, 1, 0, 0, 0, 0, 95,
|
||||
1, 0, 0, 0, 0, 97, 1, 0, 0, 0, 0, 99, 1, 0, 0, 0, 0, 101, 1, 0, 0, 0, 0,
|
||||
103, 1, 0, 0, 0, 0, 105, 1, 0, 0, 0, 0, 157, 1, 0, 0, 0, 0, 159, 1, 0,
|
||||
0, 0, 1, 161, 1, 0, 0, 0, 3, 163, 1, 0, 0, 0, 5, 165, 1, 0, 0, 0, 7, 167,
|
||||
1, 0, 0, 0, 9, 169, 1, 0, 0, 0, 11, 171, 1, 0, 0, 0, 13, 173, 1, 0, 0,
|
||||
0, 15, 175, 1, 0, 0, 0, 17, 177, 1, 0, 0, 0, 19, 180, 1, 0, 0, 0, 21, 182,
|
||||
1, 0, 0, 0, 23, 185, 1, 0, 0, 0, 25, 188, 1, 0, 0, 0, 27, 199, 1, 0, 0,
|
||||
0, 29, 213, 1, 0, 0, 0, 31, 235, 1, 0, 0, 0, 33, 261, 1, 0, 0, 0, 35, 289,
|
||||
1, 0, 0, 0, 37, 307, 1, 0, 0, 0, 39, 315, 1, 0, 0, 0, 41, 317, 1, 0, 0,
|
||||
0, 43, 319, 1, 0, 0, 0, 45, 321, 1, 0, 0, 0, 47, 323, 1, 0, 0, 0, 49, 325,
|
||||
1, 0, 0, 0, 51, 327, 1, 0, 0, 0, 53, 330, 1, 0, 0, 0, 55, 333, 1, 0, 0,
|
||||
0, 57, 336, 1, 0, 0, 0, 59, 338, 1, 0, 0, 0, 61, 340, 1, 0, 0, 0, 63, 350,
|
||||
1, 0, 0, 0, 65, 358, 1, 0, 0, 0, 67, 374, 1, 0, 0, 0, 69, 398, 1, 0, 0,
|
||||
0, 71, 400, 1, 0, 0, 0, 73, 409, 1, 0, 0, 0, 75, 415, 1, 0, 0, 0, 77, 417,
|
||||
1, 0, 0, 0, 79, 453, 1, 0, 0, 0, 81, 489, 1, 0, 0, 0, 83, 525, 1, 0, 0,
|
||||
0, 85, 555, 1, 0, 0, 0, 87, 593, 1, 0, 0, 0, 89, 631, 1, 0, 0, 0, 91, 657,
|
||||
1, 0, 0, 0, 93, 686, 1, 0, 0, 0, 95, 692, 1, 0, 0, 0, 97, 696, 1, 0, 0,
|
||||
0, 99, 698, 1, 0, 0, 0, 101, 706, 1, 0, 0, 0, 103, 713, 1, 0, 0, 0, 105,
|
||||
729, 1, 0, 0, 0, 107, 745, 1, 0, 0, 0, 109, 748, 1, 0, 0, 0, 111, 753,
|
||||
1, 0, 0, 0, 113, 764, 1, 0, 0, 0, 115, 773, 1, 0, 0, 0, 117, 775, 1, 0,
|
||||
0, 0, 119, 777, 1, 0, 0, 0, 121, 779, 1, 0, 0, 0, 123, 794, 1, 0, 0, 0,
|
||||
125, 796, 1, 0, 0, 0, 127, 803, 1, 0, 0, 0, 129, 807, 1, 0, 0, 0, 131,
|
||||
809, 1, 0, 0, 0, 133, 811, 1, 0, 0, 0, 135, 813, 1, 0, 0, 0, 137, 828,
|
||||
1, 0, 0, 0, 139, 837, 1, 0, 0, 0, 141, 839, 1, 0, 0, 0, 143, 855, 1, 0,
|
||||
0, 0, 145, 857, 1, 0, 0, 0, 147, 864, 1, 0, 0, 0, 149, 876, 1, 0, 0, 0,
|
||||
151, 879, 1, 0, 0, 0, 153, 883, 1, 0, 0, 0, 155, 904, 1, 0, 0, 0, 157,
|
||||
907, 1, 0, 0, 0, 159, 918, 1, 0, 0, 0, 161, 162, 5, 40, 0, 0, 162, 2, 1,
|
||||
0, 0, 0, 163, 164, 5, 41, 0, 0, 164, 4, 1, 0, 0, 0, 165, 166, 5, 91, 0,
|
||||
0, 166, 6, 1, 0, 0, 0, 167, 168, 5, 44, 0, 0, 168, 8, 1, 0, 0, 0, 169,
|
||||
170, 5, 93, 0, 0, 170, 10, 1, 0, 0, 0, 171, 172, 5, 123, 0, 0, 172, 12,
|
||||
1, 0, 0, 0, 173, 174, 5, 125, 0, 0, 174, 14, 1, 0, 0, 0, 175, 176, 5, 60,
|
||||
0, 0, 176, 16, 1, 0, 0, 0, 177, 178, 5, 60, 0, 0, 178, 179, 5, 61, 0, 0,
|
||||
179, 18, 1, 0, 0, 0, 180, 181, 5, 62, 0, 0, 181, 20, 1, 0, 0, 0, 182, 183,
|
||||
5, 62, 0, 0, 183, 184, 5, 61, 0, 0, 184, 22, 1, 0, 0, 0, 185, 186, 5, 61,
|
||||
0, 0, 186, 187, 5, 61, 0, 0, 187, 24, 1, 0, 0, 0, 188, 189, 5, 33, 0, 0,
|
||||
189, 190, 5, 61, 0, 0, 190, 26, 1, 0, 0, 0, 191, 192, 5, 108, 0, 0, 192,
|
||||
193, 5, 105, 0, 0, 193, 194, 5, 107, 0, 0, 194, 200, 5, 101, 0, 0, 195,
|
||||
196, 5, 76, 0, 0, 196, 197, 5, 73, 0, 0, 197, 198, 5, 75, 0, 0, 198, 200,
|
||||
5, 69, 0, 0, 199, 191, 1, 0, 0, 0, 199, 195, 1, 0, 0, 0, 200, 28, 1, 0,
|
||||
0, 0, 201, 202, 5, 101, 0, 0, 202, 203, 5, 120, 0, 0, 203, 204, 5, 105,
|
||||
0, 0, 204, 205, 5, 115, 0, 0, 205, 206, 5, 116, 0, 0, 206, 214, 5, 115,
|
||||
0, 0, 207, 208, 5, 69, 0, 0, 208, 209, 5, 88, 0, 0, 209, 210, 5, 73, 0,
|
||||
0, 210, 211, 5, 83, 0, 0, 211, 212, 5, 84, 0, 0, 212, 214, 5, 83, 0, 0,
|
||||
213, 201, 1, 0, 0, 0, 213, 207, 1, 0, 0, 0, 214, 30, 1, 0, 0, 0, 215, 216,
|
||||
5, 116, 0, 0, 216, 217, 5, 101, 0, 0, 217, 218, 5, 120, 0, 0, 218, 219,
|
||||
5, 116, 0, 0, 219, 220, 5, 95, 0, 0, 220, 221, 5, 109, 0, 0, 221, 222,
|
||||
5, 97, 0, 0, 222, 223, 5, 116, 0, 0, 223, 224, 5, 99, 0, 0, 224, 236, 5,
|
||||
104, 0, 0, 225, 226, 5, 84, 0, 0, 226, 227, 5, 69, 0, 0, 227, 228, 5, 88,
|
||||
0, 0, 228, 229, 5, 84, 0, 0, 229, 230, 5, 95, 0, 0, 230, 231, 5, 77, 0,
|
||||
0, 231, 232, 5, 65, 0, 0, 232, 233, 5, 84, 0, 0, 233, 234, 5, 67, 0, 0,
|
||||
234, 236, 5, 72, 0, 0, 235, 215, 1, 0, 0, 0, 235, 225, 1, 0, 0, 0, 236,
|
||||
32, 1, 0, 0, 0, 237, 238, 5, 112, 0, 0, 238, 239, 5, 104, 0, 0, 239, 240,
|
||||
5, 114, 0, 0, 240, 241, 5, 97, 0, 0, 241, 242, 5, 115, 0, 0, 242, 243,
|
||||
5, 101, 0, 0, 243, 244, 5, 95, 0, 0, 244, 245, 5, 109, 0, 0, 245, 246,
|
||||
5, 97, 0, 0, 246, 247, 5, 116, 0, 0, 247, 248, 5, 99, 0, 0, 248, 262, 5,
|
||||
104, 0, 0, 249, 250, 5, 80, 0, 0, 250, 251, 5, 72, 0, 0, 251, 252, 5, 82,
|
||||
0, 0, 252, 253, 5, 65, 0, 0, 253, 254, 5, 83, 0, 0, 254, 255, 5, 69, 0,
|
||||
0, 255, 256, 5, 95, 0, 0, 256, 257, 5, 77, 0, 0, 257, 258, 5, 65, 0, 0,
|
||||
258, 259, 5, 84, 0, 0, 259, 260, 5, 67, 0, 0, 260, 262, 5, 72, 0, 0, 261,
|
||||
237, 1, 0, 0, 0, 261, 249, 1, 0, 0, 0, 262, 34, 1, 0, 0, 0, 263, 264, 5,
|
||||
114, 0, 0, 264, 265, 5, 97, 0, 0, 265, 266, 5, 110, 0, 0, 266, 267, 5,
|
||||
100, 0, 0, 267, 268, 5, 111, 0, 0, 268, 269, 5, 109, 0, 0, 269, 270, 5,
|
||||
95, 0, 0, 270, 271, 5, 115, 0, 0, 271, 272, 5, 97, 0, 0, 272, 273, 5, 109,
|
||||
0, 0, 273, 274, 5, 112, 0, 0, 274, 275, 5, 108, 0, 0, 275, 290, 5, 101,
|
||||
0, 0, 276, 277, 5, 82, 0, 0, 277, 278, 5, 65, 0, 0, 278, 279, 5, 78, 0,
|
||||
0, 279, 280, 5, 68, 0, 0, 280, 281, 5, 79, 0, 0, 281, 282, 5, 77, 0, 0,
|
||||
282, 283, 5, 95, 0, 0, 283, 284, 5, 83, 0, 0, 284, 285, 5, 65, 0, 0, 285,
|
||||
286, 5, 77, 0, 0, 286, 287, 5, 80, 0, 0, 287, 288, 5, 76, 0, 0, 288, 290,
|
||||
5, 69, 0, 0, 289, 263, 1, 0, 0, 0, 289, 276, 1, 0, 0, 0, 290, 36, 1, 0,
|
||||
0, 0, 291, 292, 5, 105, 0, 0, 292, 293, 5, 110, 0, 0, 293, 294, 5, 116,
|
||||
0, 0, 294, 295, 5, 101, 0, 0, 295, 296, 5, 114, 0, 0, 296, 297, 5, 118,
|
||||
0, 0, 297, 298, 5, 97, 0, 0, 298, 308, 5, 108, 0, 0, 299, 300, 5, 73, 0,
|
||||
0, 300, 301, 5, 78, 0, 0, 301, 302, 5, 84, 0, 0, 302, 303, 5, 69, 0, 0,
|
||||
303, 304, 5, 82, 0, 0, 304, 305, 5, 86, 0, 0, 305, 306, 5, 65, 0, 0, 306,
|
||||
308, 5, 76, 0, 0, 307, 291, 1, 0, 0, 0, 307, 299, 1, 0, 0, 0, 308, 38,
|
||||
1, 0, 0, 0, 309, 310, 5, 105, 0, 0, 310, 311, 5, 115, 0, 0, 311, 316, 5,
|
||||
111, 0, 0, 312, 313, 5, 73, 0, 0, 313, 314, 5, 83, 0, 0, 314, 316, 5, 79,
|
||||
0, 0, 315, 309, 1, 0, 0, 0, 315, 312, 1, 0, 0, 0, 316, 40, 1, 0, 0, 0,
|
||||
317, 318, 5, 43, 0, 0, 318, 42, 1, 0, 0, 0, 319, 320, 5, 45, 0, 0, 320,
|
||||
44, 1, 0, 0, 0, 321, 322, 5, 42, 0, 0, 322, 46, 1, 0, 0, 0, 323, 324, 5,
|
||||
47, 0, 0, 324, 48, 1, 0, 0, 0, 325, 326, 5, 37, 0, 0, 326, 50, 1, 0, 0,
|
||||
0, 327, 328, 5, 42, 0, 0, 328, 329, 5, 42, 0, 0, 329, 52, 1, 0, 0, 0, 330,
|
||||
331, 5, 60, 0, 0, 331, 332, 5, 60, 0, 0, 332, 54, 1, 0, 0, 0, 333, 334,
|
||||
5, 62, 0, 0, 334, 335, 5, 62, 0, 0, 335, 56, 1, 0, 0, 0, 336, 337, 5, 38,
|
||||
0, 0, 337, 58, 1, 0, 0, 0, 338, 339, 5, 124, 0, 0, 339, 60, 1, 0, 0, 0,
|
||||
340, 341, 5, 94, 0, 0, 341, 62, 1, 0, 0, 0, 342, 343, 5, 38, 0, 0, 343,
|
||||
351, 5, 38, 0, 0, 344, 345, 5, 97, 0, 0, 345, 346, 5, 110, 0, 0, 346, 351,
|
||||
5, 100, 0, 0, 347, 348, 5, 65, 0, 0, 348, 349, 5, 78, 0, 0, 349, 351, 5,
|
||||
68, 0, 0, 350, 342, 1, 0, 0, 0, 350, 344, 1, 0, 0, 0, 350, 347, 1, 0, 0,
|
||||
0, 351, 64, 1, 0, 0, 0, 352, 353, 5, 124, 0, 0, 353, 359, 5, 124, 0, 0,
|
||||
354, 355, 5, 111, 0, 0, 355, 359, 5, 114, 0, 0, 356, 357, 5, 79, 0, 0,
|
||||
357, 359, 5, 82, 0, 0, 358, 352, 1, 0, 0, 0, 358, 354, 1, 0, 0, 0, 358,
|
||||
356, 1, 0, 0, 0, 359, 66, 1, 0, 0, 0, 360, 361, 5, 105, 0, 0, 361, 362,
|
||||
5, 115, 0, 0, 362, 363, 5, 32, 0, 0, 363, 364, 5, 110, 0, 0, 364, 365,
|
||||
5, 117, 0, 0, 365, 366, 5, 108, 0, 0, 366, 375, 5, 108, 0, 0, 367, 368,
|
||||
5, 73, 0, 0, 368, 369, 5, 83, 0, 0, 369, 370, 5, 32, 0, 0, 370, 371, 5,
|
||||
78, 0, 0, 371, 372, 5, 85, 0, 0, 372, 373, 5, 76, 0, 0, 373, 375, 5, 76,
|
||||
0, 0, 374, 360, 1, 0, 0, 0, 374, 367, 1, 0, 0, 0, 375, 68, 1, 0, 0, 0,
|
||||
376, 377, 5, 105, 0, 0, 377, 378, 5, 115, 0, 0, 378, 379, 5, 32, 0, 0,
|
||||
379, 380, 5, 110, 0, 0, 380, 381, 5, 111, 0, 0, 381, 382, 5, 116, 0, 0,
|
||||
382, 383, 5, 32, 0, 0, 383, 384, 5, 110, 0, 0, 384, 385, 5, 117, 0, 0,
|
||||
385, 386, 5, 108, 0, 0, 386, 399, 5, 108, 0, 0, 387, 388, 5, 73, 0, 0,
|
||||
388, 389, 5, 83, 0, 0, 389, 390, 5, 32, 0, 0, 390, 391, 5, 78, 0, 0, 391,
|
||||
392, 5, 79, 0, 0, 392, 393, 5, 84, 0, 0, 393, 394, 5, 32, 0, 0, 394, 395,
|
||||
5, 78, 0, 0, 395, 396, 5, 85, 0, 0, 396, 397, 5, 76, 0, 0, 397, 399, 5,
|
||||
76, 0, 0, 398, 376, 1, 0, 0, 0, 398, 387, 1, 0, 0, 0, 399, 70, 1, 0, 0,
|
||||
0, 400, 401, 5, 126, 0, 0, 401, 72, 1, 0, 0, 0, 402, 410, 5, 33, 0, 0,
|
||||
403, 404, 5, 110, 0, 0, 404, 405, 5, 111, 0, 0, 405, 410, 5, 116, 0, 0,
|
||||
406, 407, 5, 78, 0, 0, 407, 408, 5, 79, 0, 0, 408, 410, 5, 84, 0, 0, 409,
|
||||
402, 1, 0, 0, 0, 409, 403, 1, 0, 0, 0, 409, 406, 1, 0, 0, 0, 410, 74, 1,
|
||||
0, 0, 0, 411, 412, 5, 105, 0, 0, 412, 416, 5, 110, 0, 0, 413, 414, 5, 73,
|
||||
0, 0, 414, 416, 5, 78, 0, 0, 415, 411, 1, 0, 0, 0, 415, 413, 1, 0, 0, 0,
|
||||
416, 76, 1, 0, 0, 0, 417, 422, 5, 91, 0, 0, 418, 421, 3, 157, 78, 0, 419,
|
||||
421, 3, 159, 79, 0, 420, 418, 1, 0, 0, 0, 420, 419, 1, 0, 0, 0, 421, 424,
|
||||
1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 422, 423, 1, 0, 0, 0, 423, 425, 1, 0,
|
||||
0, 0, 424, 422, 1, 0, 0, 0, 425, 426, 5, 93, 0, 0, 426, 78, 1, 0, 0, 0,
|
||||
427, 428, 5, 106, 0, 0, 428, 429, 5, 115, 0, 0, 429, 430, 5, 111, 0, 0,
|
||||
430, 431, 5, 110, 0, 0, 431, 432, 5, 95, 0, 0, 432, 433, 5, 99, 0, 0, 433,
|
||||
434, 5, 111, 0, 0, 434, 435, 5, 110, 0, 0, 435, 436, 5, 116, 0, 0, 436,
|
||||
437, 5, 97, 0, 0, 437, 438, 5, 105, 0, 0, 438, 439, 5, 110, 0, 0, 439,
|
||||
454, 5, 115, 0, 0, 440, 441, 5, 74, 0, 0, 441, 442, 5, 83, 0, 0, 442, 443,
|
||||
5, 79, 0, 0, 443, 444, 5, 78, 0, 0, 444, 445, 5, 95, 0, 0, 445, 446, 5,
|
||||
67, 0, 0, 446, 447, 5, 79, 0, 0, 447, 448, 5, 78, 0, 0, 448, 449, 5, 84,
|
||||
0, 0, 449, 450, 5, 65, 0, 0, 450, 451, 5, 73, 0, 0, 451, 452, 5, 78, 0,
|
||||
0, 452, 454, 5, 83, 0, 0, 453, 427, 1, 0, 0, 0, 453, 440, 1, 0, 0, 0, 454,
|
||||
80, 1, 0, 0, 0, 455, 456, 5, 106, 0, 0, 456, 457, 5, 115, 0, 0, 457, 458,
|
||||
5, 111, 0, 0, 458, 459, 5, 110, 0, 0, 459, 460, 5, 95, 0, 0, 460, 461,
|
||||
5, 99, 0, 0, 461, 462, 5, 111, 0, 0, 462, 463, 5, 110, 0, 0, 463, 464,
|
||||
5, 116, 0, 0, 464, 465, 5, 97, 0, 0, 465, 466, 5, 105, 0, 0, 466, 467,
|
||||
5, 110, 0, 0, 467, 468, 5, 115, 0, 0, 468, 469, 5, 95, 0, 0, 469, 470,
|
||||
5, 97, 0, 0, 470, 471, 5, 108, 0, 0, 471, 490, 5, 108, 0, 0, 472, 473,
|
||||
5, 74, 0, 0, 473, 474, 5, 83, 0, 0, 474, 475, 5, 79, 0, 0, 475, 476, 5,
|
||||
78, 0, 0, 476, 477, 5, 95, 0, 0, 477, 478, 5, 67, 0, 0, 478, 479, 5, 79,
|
||||
0, 0, 479, 480, 5, 78, 0, 0, 480, 481, 5, 84, 0, 0, 481, 482, 5, 65, 0,
|
||||
0, 482, 483, 5, 73, 0, 0, 483, 484, 5, 78, 0, 0, 484, 485, 5, 83, 0, 0,
|
||||
485, 486, 5, 95, 0, 0, 486, 487, 5, 65, 0, 0, 487, 488, 5, 76, 0, 0, 488,
|
||||
490, 5, 76, 0, 0, 489, 455, 1, 0, 0, 0, 489, 472, 1, 0, 0, 0, 490, 82,
|
||||
1, 0, 0, 0, 491, 492, 5, 106, 0, 0, 492, 493, 5, 115, 0, 0, 493, 494, 5,
|
||||
111, 0, 0, 494, 495, 5, 110, 0, 0, 495, 496, 5, 95, 0, 0, 496, 497, 5,
|
||||
99, 0, 0, 497, 498, 5, 111, 0, 0, 498, 499, 5, 110, 0, 0, 499, 500, 5,
|
||||
116, 0, 0, 500, 501, 5, 97, 0, 0, 501, 502, 5, 105, 0, 0, 502, 503, 5,
|
||||
110, 0, 0, 503, 504, 5, 115, 0, 0, 504, 505, 5, 95, 0, 0, 505, 506, 5,
|
||||
97, 0, 0, 506, 507, 5, 110, 0, 0, 507, 526, 5, 121, 0, 0, 508, 509, 5,
|
||||
74, 0, 0, 509, 510, 5, 83, 0, 0, 510, 511, 5, 79, 0, 0, 511, 512, 5, 78,
|
||||
0, 0, 512, 513, 5, 95, 0, 0, 513, 514, 5, 67, 0, 0, 514, 515, 5, 79, 0,
|
||||
0, 515, 516, 5, 78, 0, 0, 516, 517, 5, 84, 0, 0, 517, 518, 5, 65, 0, 0,
|
||||
518, 519, 5, 73, 0, 0, 519, 520, 5, 78, 0, 0, 520, 521, 5, 83, 0, 0, 521,
|
||||
522, 5, 95, 0, 0, 522, 523, 5, 65, 0, 0, 523, 524, 5, 78, 0, 0, 524, 526,
|
||||
5, 89, 0, 0, 525, 491, 1, 0, 0, 0, 525, 508, 1, 0, 0, 0, 526, 84, 1, 0,
|
||||
0, 0, 527, 528, 5, 97, 0, 0, 528, 529, 5, 114, 0, 0, 529, 530, 5, 114,
|
||||
0, 0, 530, 531, 5, 97, 0, 0, 531, 532, 5, 121, 0, 0, 532, 533, 5, 95, 0,
|
||||
0, 533, 534, 5, 99, 0, 0, 534, 535, 5, 111, 0, 0, 535, 536, 5, 110, 0,
|
||||
0, 536, 537, 5, 116, 0, 0, 537, 538, 5, 97, 0, 0, 538, 539, 5, 105, 0,
|
||||
0, 539, 540, 5, 110, 0, 0, 540, 556, 5, 115, 0, 0, 541, 542, 5, 65, 0,
|
||||
0, 542, 543, 5, 82, 0, 0, 543, 544, 5, 82, 0, 0, 544, 545, 5, 65, 0, 0,
|
||||
545, 546, 5, 89, 0, 0, 546, 547, 5, 95, 0, 0, 547, 548, 5, 67, 0, 0, 548,
|
||||
549, 5, 79, 0, 0, 549, 550, 5, 78, 0, 0, 550, 551, 5, 84, 0, 0, 551, 552,
|
||||
5, 65, 0, 0, 552, 553, 5, 73, 0, 0, 553, 554, 5, 78, 0, 0, 554, 556, 5,
|
||||
83, 0, 0, 555, 527, 1, 0, 0, 0, 555, 541, 1, 0, 0, 0, 556, 86, 1, 0, 0,
|
||||
0, 557, 558, 5, 97, 0, 0, 558, 559, 5, 114, 0, 0, 559, 560, 5, 114, 0,
|
||||
0, 560, 561, 5, 97, 0, 0, 561, 562, 5, 121, 0, 0, 562, 563, 5, 95, 0, 0,
|
||||
563, 564, 5, 99, 0, 0, 564, 565, 5, 111, 0, 0, 565, 566, 5, 110, 0, 0,
|
||||
566, 567, 5, 116, 0, 0, 567, 568, 5, 97, 0, 0, 568, 569, 5, 105, 0, 0,
|
||||
569, 570, 5, 110, 0, 0, 570, 571, 5, 115, 0, 0, 571, 572, 5, 95, 0, 0,
|
||||
572, 573, 5, 97, 0, 0, 573, 574, 5, 108, 0, 0, 574, 594, 5, 108, 0, 0,
|
||||
575, 576, 5, 65, 0, 0, 576, 577, 5, 82, 0, 0, 577, 578, 5, 82, 0, 0, 578,
|
||||
579, 5, 65, 0, 0, 579, 580, 5, 89, 0, 0, 580, 581, 5, 95, 0, 0, 581, 582,
|
||||
5, 67, 0, 0, 582, 583, 5, 79, 0, 0, 583, 584, 5, 78, 0, 0, 584, 585, 5,
|
||||
84, 0, 0, 585, 586, 5, 65, 0, 0, 586, 587, 5, 73, 0, 0, 587, 588, 5, 78,
|
||||
0, 0, 588, 589, 5, 83, 0, 0, 589, 590, 5, 95, 0, 0, 590, 591, 5, 65, 0,
|
||||
0, 591, 592, 5, 76, 0, 0, 592, 594, 5, 76, 0, 0, 593, 557, 1, 0, 0, 0,
|
||||
593, 575, 1, 0, 0, 0, 594, 88, 1, 0, 0, 0, 595, 596, 5, 97, 0, 0, 596,
|
||||
597, 5, 114, 0, 0, 597, 598, 5, 114, 0, 0, 598, 599, 5, 97, 0, 0, 599,
|
||||
600, 5, 121, 0, 0, 600, 601, 5, 95, 0, 0, 601, 602, 5, 99, 0, 0, 602, 603,
|
||||
5, 111, 0, 0, 603, 604, 5, 110, 0, 0, 604, 605, 5, 116, 0, 0, 605, 606,
|
||||
5, 97, 0, 0, 606, 607, 5, 105, 0, 0, 607, 608, 5, 110, 0, 0, 608, 609,
|
||||
5, 115, 0, 0, 609, 610, 5, 95, 0, 0, 610, 611, 5, 97, 0, 0, 611, 612, 5,
|
||||
110, 0, 0, 612, 632, 5, 121, 0, 0, 613, 614, 5, 65, 0, 0, 614, 615, 5,
|
||||
82, 0, 0, 615, 616, 5, 82, 0, 0, 616, 617, 5, 65, 0, 0, 617, 618, 5, 89,
|
||||
0, 0, 618, 619, 5, 95, 0, 0, 619, 620, 5, 67, 0, 0, 620, 621, 5, 79, 0,
|
||||
0, 621, 622, 5, 78, 0, 0, 622, 623, 5, 84, 0, 0, 623, 624, 5, 65, 0, 0,
|
||||
624, 625, 5, 73, 0, 0, 625, 626, 5, 78, 0, 0, 626, 627, 5, 83, 0, 0, 627,
|
||||
628, 5, 95, 0, 0, 628, 629, 5, 65, 0, 0, 629, 630, 5, 78, 0, 0, 630, 632,
|
||||
5, 89, 0, 0, 631, 595, 1, 0, 0, 0, 631, 613, 1, 0, 0, 0, 632, 90, 1, 0,
|
||||
0, 0, 633, 634, 5, 97, 0, 0, 634, 635, 5, 114, 0, 0, 635, 636, 5, 114,
|
||||
0, 0, 636, 637, 5, 97, 0, 0, 637, 638, 5, 121, 0, 0, 638, 639, 5, 95, 0,
|
||||
0, 639, 640, 5, 108, 0, 0, 640, 641, 5, 101, 0, 0, 641, 642, 5, 110, 0,
|
||||
0, 642, 643, 5, 103, 0, 0, 643, 644, 5, 116, 0, 0, 644, 658, 5, 104, 0,
|
||||
0, 645, 646, 5, 65, 0, 0, 646, 647, 5, 82, 0, 0, 647, 648, 5, 82, 0, 0,
|
||||
648, 649, 5, 65, 0, 0, 649, 650, 5, 89, 0, 0, 650, 651, 5, 95, 0, 0, 651,
|
||||
652, 5, 76, 0, 0, 652, 653, 5, 69, 0, 0, 653, 654, 5, 78, 0, 0, 654, 655,
|
||||
5, 71, 0, 0, 655, 656, 5, 84, 0, 0, 656, 658, 5, 72, 0, 0, 657, 633, 1,
|
||||
0, 0, 0, 657, 645, 1, 0, 0, 0, 658, 92, 1, 0, 0, 0, 659, 660, 5, 116, 0,
|
||||
0, 660, 661, 5, 114, 0, 0, 661, 662, 5, 117, 0, 0, 662, 687, 5, 101, 0,
|
||||
0, 663, 664, 5, 84, 0, 0, 664, 665, 5, 114, 0, 0, 665, 666, 5, 117, 0,
|
||||
0, 666, 687, 5, 101, 0, 0, 667, 668, 5, 84, 0, 0, 668, 669, 5, 82, 0, 0,
|
||||
669, 670, 5, 85, 0, 0, 670, 687, 5, 69, 0, 0, 671, 672, 5, 102, 0, 0, 672,
|
||||
673, 5, 97, 0, 0, 673, 674, 5, 108, 0, 0, 674, 675, 5, 115, 0, 0, 675,
|
||||
687, 5, 101, 0, 0, 676, 677, 5, 70, 0, 0, 677, 678, 5, 97, 0, 0, 678, 679,
|
||||
5, 108, 0, 0, 679, 680, 5, 115, 0, 0, 680, 687, 5, 101, 0, 0, 681, 682,
|
||||
5, 70, 0, 0, 682, 683, 5, 65, 0, 0, 683, 684, 5, 76, 0, 0, 684, 685, 5,
|
||||
83, 0, 0, 685, 687, 5, 69, 0, 0, 686, 659, 1, 0, 0, 0, 686, 663, 1, 0,
|
||||
0, 0, 686, 667, 1, 0, 0, 0, 686, 671, 1, 0, 0, 0, 686, 676, 1, 0, 0, 0,
|
||||
686, 681, 1, 0, 0, 0, 687, 94, 1, 0, 0, 0, 688, 693, 3, 123, 61, 0, 689,
|
||||
693, 3, 125, 62, 0, 690, 693, 3, 127, 63, 0, 691, 693, 3, 121, 60, 0, 692,
|
||||
688, 1, 0, 0, 0, 692, 689, 1, 0, 0, 0, 692, 690, 1, 0, 0, 0, 692, 691,
|
||||
1, 0, 0, 0, 693, 96, 1, 0, 0, 0, 694, 697, 3, 139, 69, 0, 695, 697, 3,
|
||||
141, 70, 0, 696, 694, 1, 0, 0, 0, 696, 695, 1, 0, 0, 0, 697, 98, 1, 0,
|
||||
0, 0, 698, 703, 3, 117, 58, 0, 699, 702, 3, 117, 58, 0, 700, 702, 3, 119,
|
||||
59, 0, 701, 699, 1, 0, 0, 0, 701, 700, 1, 0, 0, 0, 702, 705, 1, 0, 0, 0,
|
||||
703, 701, 1, 0, 0, 0, 703, 704, 1, 0, 0, 0, 704, 100, 1, 0, 0, 0, 705,
|
||||
703, 1, 0, 0, 0, 706, 707, 5, 36, 0, 0, 707, 708, 5, 109, 0, 0, 708, 709,
|
||||
5, 101, 0, 0, 709, 710, 5, 116, 0, 0, 710, 711, 5, 97, 0, 0, 711, 102,
|
||||
1, 0, 0, 0, 712, 714, 3, 107, 53, 0, 713, 712, 1, 0, 0, 0, 713, 714, 1,
|
||||
0, 0, 0, 714, 725, 1, 0, 0, 0, 715, 717, 5, 34, 0, 0, 716, 718, 3, 109,
|
||||
54, 0, 717, 716, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 719, 1, 0, 0, 0,
|
||||
719, 726, 5, 34, 0, 0, 720, 722, 5, 39, 0, 0, 721, 723, 3, 111, 55, 0,
|
||||
722, 721, 1, 0, 0, 0, 722, 723, 1, 0, 0, 0, 723, 724, 1, 0, 0, 0, 724,
|
||||
726, 5, 39, 0, 0, 725, 715, 1, 0, 0, 0, 725, 720, 1, 0, 0, 0, 726, 104,
|
||||
1, 0, 0, 0, 727, 730, 3, 99, 49, 0, 728, 730, 3, 101, 50, 0, 729, 727,
|
||||
1, 0, 0, 0, 729, 728, 1, 0, 0, 0, 730, 738, 1, 0, 0, 0, 731, 734, 5, 91,
|
||||
0, 0, 732, 735, 3, 103, 51, 0, 733, 735, 3, 123, 61, 0, 734, 732, 1, 0,
|
||||
0, 0, 734, 733, 1, 0, 0, 0, 735, 736, 1, 0, 0, 0, 736, 737, 5, 93, 0, 0,
|
||||
737, 739, 1, 0, 0, 0, 738, 731, 1, 0, 0, 0, 739, 740, 1, 0, 0, 0, 740,
|
||||
738, 1, 0, 0, 0, 740, 741, 1, 0, 0, 0, 741, 106, 1, 0, 0, 0, 742, 743,
|
||||
5, 117, 0, 0, 743, 746, 5, 56, 0, 0, 744, 746, 7, 0, 0, 0, 745, 742, 1,
|
||||
0, 0, 0, 745, 744, 1, 0, 0, 0, 746, 108, 1, 0, 0, 0, 747, 749, 3, 113,
|
||||
56, 0, 748, 747, 1, 0, 0, 0, 749, 750, 1, 0, 0, 0, 750, 748, 1, 0, 0, 0,
|
||||
750, 751, 1, 0, 0, 0, 751, 110, 1, 0, 0, 0, 752, 754, 3, 115, 57, 0, 753,
|
||||
752, 1, 0, 0, 0, 754, 755, 1, 0, 0, 0, 755, 753, 1, 0, 0, 0, 755, 756,
|
||||
1, 0, 0, 0, 756, 112, 1, 0, 0, 0, 757, 765, 8, 1, 0, 0, 758, 765, 3, 155,
|
||||
77, 0, 759, 760, 5, 92, 0, 0, 760, 765, 5, 10, 0, 0, 761, 762, 5, 92, 0,
|
||||
0, 762, 763, 5, 13, 0, 0, 763, 765, 5, 10, 0, 0, 764, 757, 1, 0, 0, 0,
|
||||
764, 758, 1, 0, 0, 0, 764, 759, 1, 0, 0, 0, 764, 761, 1, 0, 0, 0, 765,
|
||||
114, 1, 0, 0, 0, 766, 774, 8, 2, 0, 0, 767, 774, 3, 155, 77, 0, 768, 769,
|
||||
5, 92, 0, 0, 769, 774, 5, 10, 0, 0, 770, 771, 5, 92, 0, 0, 771, 772, 5,
|
||||
13, 0, 0, 772, 774, 5, 10, 0, 0, 773, 766, 1, 0, 0, 0, 773, 767, 1, 0,
|
||||
0, 0, 773, 768, 1, 0, 0, 0, 773, 770, 1, 0, 0, 0, 774, 116, 1, 0, 0, 0,
|
||||
775, 776, 7, 3, 0, 0, 776, 118, 1, 0, 0, 0, 777, 778, 7, 4, 0, 0, 778,
|
||||
120, 1, 0, 0, 0, 779, 780, 5, 48, 0, 0, 780, 782, 7, 5, 0, 0, 781, 783,
|
||||
7, 6, 0, 0, 782, 781, 1, 0, 0, 0, 783, 784, 1, 0, 0, 0, 784, 782, 1, 0,
|
||||
0, 0, 784, 785, 1, 0, 0, 0, 785, 122, 1, 0, 0, 0, 786, 790, 3, 129, 64,
|
||||
0, 787, 789, 3, 119, 59, 0, 788, 787, 1, 0, 0, 0, 789, 792, 1, 0, 0, 0,
|
||||
790, 788, 1, 0, 0, 0, 790, 791, 1, 0, 0, 0, 791, 795, 1, 0, 0, 0, 792,
|
||||
790, 1, 0, 0, 0, 793, 795, 5, 48, 0, 0, 794, 786, 1, 0, 0, 0, 794, 793,
|
||||
1, 0, 0, 0, 795, 124, 1, 0, 0, 0, 796, 800, 5, 48, 0, 0, 797, 799, 3, 131,
|
||||
65, 0, 798, 797, 1, 0, 0, 0, 799, 802, 1, 0, 0, 0, 800, 798, 1, 0, 0, 0,
|
||||
800, 801, 1, 0, 0, 0, 801, 126, 1, 0, 0, 0, 802, 800, 1, 0, 0, 0, 803,
|
||||
804, 5, 48, 0, 0, 804, 805, 7, 7, 0, 0, 805, 806, 3, 151, 75, 0, 806, 128,
|
||||
1, 0, 0, 0, 807, 808, 7, 8, 0, 0, 808, 130, 1, 0, 0, 0, 809, 810, 7, 9,
|
||||
0, 0, 810, 132, 1, 0, 0, 0, 811, 812, 7, 10, 0, 0, 812, 134, 1, 0, 0, 0,
|
||||
813, 814, 3, 133, 66, 0, 814, 815, 3, 133, 66, 0, 815, 816, 3, 133, 66,
|
||||
0, 816, 817, 3, 133, 66, 0, 817, 136, 1, 0, 0, 0, 818, 819, 5, 92, 0, 0,
|
||||
819, 820, 5, 117, 0, 0, 820, 821, 1, 0, 0, 0, 821, 829, 3, 135, 67, 0,
|
||||
822, 823, 5, 92, 0, 0, 823, 824, 5, 85, 0, 0, 824, 825, 1, 0, 0, 0, 825,
|
||||
826, 3, 135, 67, 0, 826, 827, 3, 135, 67, 0, 827, 829, 1, 0, 0, 0, 828,
|
||||
818, 1, 0, 0, 0, 828, 822, 1, 0, 0, 0, 829, 138, 1, 0, 0, 0, 830, 832,
|
||||
3, 143, 71, 0, 831, 833, 3, 145, 72, 0, 832, 831, 1, 0, 0, 0, 832, 833,
|
||||
1, 0, 0, 0, 833, 838, 1, 0, 0, 0, 834, 835, 3, 147, 73, 0, 835, 836, 3,
|
||||
145, 72, 0, 836, 838, 1, 0, 0, 0, 837, 830, 1, 0, 0, 0, 837, 834, 1, 0,
|
||||
0, 0, 838, 140, 1, 0, 0, 0, 839, 840, 5, 48, 0, 0, 840, 843, 7, 7, 0, 0,
|
||||
841, 844, 3, 149, 74, 0, 842, 844, 3, 151, 75, 0, 843, 841, 1, 0, 0, 0,
|
||||
843, 842, 1, 0, 0, 0, 844, 845, 1, 0, 0, 0, 845, 846, 3, 153, 76, 0, 846,
|
||||
142, 1, 0, 0, 0, 847, 849, 3, 147, 73, 0, 848, 847, 1, 0, 0, 0, 848, 849,
|
||||
1, 0, 0, 0, 849, 850, 1, 0, 0, 0, 850, 851, 5, 46, 0, 0, 851, 856, 3, 147,
|
||||
73, 0, 852, 853, 3, 147, 73, 0, 853, 854, 5, 46, 0, 0, 854, 856, 1, 0,
|
||||
0, 0, 855, 848, 1, 0, 0, 0, 855, 852, 1, 0, 0, 0, 856, 144, 1, 0, 0, 0,
|
||||
857, 859, 7, 11, 0, 0, 858, 860, 7, 12, 0, 0, 859, 858, 1, 0, 0, 0, 859,
|
||||
860, 1, 0, 0, 0, 860, 861, 1, 0, 0, 0, 861, 862, 3, 147, 73, 0, 862, 146,
|
||||
1, 0, 0, 0, 863, 865, 3, 119, 59, 0, 864, 863, 1, 0, 0, 0, 865, 866, 1,
|
||||
0, 0, 0, 866, 864, 1, 0, 0, 0, 866, 867, 1, 0, 0, 0, 867, 148, 1, 0, 0,
|
||||
0, 868, 870, 3, 151, 75, 0, 869, 868, 1, 0, 0, 0, 869, 870, 1, 0, 0, 0,
|
||||
870, 871, 1, 0, 0, 0, 871, 872, 5, 46, 0, 0, 872, 877, 3, 151, 75, 0, 873,
|
||||
874, 3, 151, 75, 0, 874, 875, 5, 46, 0, 0, 875, 877, 1, 0, 0, 0, 876, 869,
|
||||
1, 0, 0, 0, 876, 873, 1, 0, 0, 0, 877, 150, 1, 0, 0, 0, 878, 880, 3, 133,
|
||||
66, 0, 879, 878, 1, 0, 0, 0, 880, 881, 1, 0, 0, 0, 881, 879, 1, 0, 0, 0,
|
||||
881, 882, 1, 0, 0, 0, 882, 152, 1, 0, 0, 0, 883, 885, 7, 13, 0, 0, 884,
|
||||
886, 7, 12, 0, 0, 885, 884, 1, 0, 0, 0, 885, 886, 1, 0, 0, 0, 886, 887,
|
||||
1, 0, 0, 0, 887, 888, 3, 147, 73, 0, 888, 154, 1, 0, 0, 0, 889, 890, 5,
|
||||
92, 0, 0, 890, 905, 7, 14, 0, 0, 891, 892, 5, 92, 0, 0, 892, 894, 3, 131,
|
||||
65, 0, 893, 895, 3, 131, 65, 0, 894, 893, 1, 0, 0, 0, 894, 895, 1, 0, 0,
|
||||
0, 895, 897, 1, 0, 0, 0, 896, 898, 3, 131, 65, 0, 897, 896, 1, 0, 0, 0,
|
||||
897, 898, 1, 0, 0, 0, 898, 905, 1, 0, 0, 0, 899, 900, 5, 92, 0, 0, 900,
|
||||
901, 5, 120, 0, 0, 901, 902, 1, 0, 0, 0, 902, 905, 3, 151, 75, 0, 903,
|
||||
905, 3, 137, 68, 0, 904, 889, 1, 0, 0, 0, 904, 891, 1, 0, 0, 0, 904, 899,
|
||||
1, 0, 0, 0, 904, 903, 1, 0, 0, 0, 905, 156, 1, 0, 0, 0, 906, 908, 7, 15,
|
||||
0, 0, 907, 906, 1, 0, 0, 0, 908, 909, 1, 0, 0, 0, 909, 907, 1, 0, 0, 0,
|
||||
909, 910, 1, 0, 0, 0, 910, 911, 1, 0, 0, 0, 911, 912, 6, 78, 0, 0, 912,
|
||||
158, 1, 0, 0, 0, 913, 915, 5, 13, 0, 0, 914, 916, 5, 10, 0, 0, 915, 914,
|
||||
1, 0, 0, 0, 915, 916, 1, 0, 0, 0, 916, 919, 1, 0, 0, 0, 917, 919, 5, 10,
|
||||
0, 0, 918, 913, 1, 0, 0, 0, 918, 917, 1, 0, 0, 0, 919, 920, 1, 0, 0, 0,
|
||||
920, 921, 6, 79, 0, 0, 921, 160, 1, 0, 0, 0, 62, 0, 199, 213, 235, 261,
|
||||
289, 307, 315, 350, 358, 374, 398, 409, 415, 420, 422, 453, 489, 525, 555,
|
||||
593, 631, 657, 686, 692, 696, 701, 703, 713, 717, 722, 725, 729, 734, 740,
|
||||
745, 750, 755, 764, 773, 784, 790, 794, 800, 828, 832, 837, 843, 848, 855,
|
||||
859, 866, 869, 876, 881, 885, 894, 897, 904, 909, 915, 918, 1, 6, 0, 0,
|
||||
0, 159, 0, 161, 0, 163, 0, 165, 0, 167, 0, 169, 0, 171, 0, 173, 62, 175,
|
||||
63, 1, 0, 16, 3, 0, 76, 76, 85, 85, 117, 117, 4, 0, 10, 10, 13, 13, 34,
|
||||
34, 92, 92, 4, 0, 10, 10, 13, 13, 39, 39, 92, 92, 3, 0, 65, 90, 95, 95,
|
||||
97, 122, 1, 0, 48, 57, 2, 0, 66, 66, 98, 98, 1, 0, 48, 49, 2, 0, 88, 88,
|
||||
120, 120, 1, 0, 49, 57, 1, 0, 48, 55, 3, 0, 48, 57, 65, 70, 97, 102, 2,
|
||||
0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 2, 0, 80, 80, 112, 112, 10,
|
||||
0, 34, 34, 39, 39, 63, 63, 92, 92, 97, 98, 102, 102, 110, 110, 114, 114,
|
||||
116, 116, 118, 118, 2, 0, 9, 9, 32, 32, 1178, 0, 1, 1, 0, 0, 0, 0, 3, 1,
|
||||
0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1,
|
||||
0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19,
|
||||
1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0,
|
||||
27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0,
|
||||
0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0,
|
||||
0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0,
|
||||
0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1,
|
||||
0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 63, 1, 0, 0, 0, 0, 65,
|
||||
1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, 0,
|
||||
73, 1, 0, 0, 0, 0, 75, 1, 0, 0, 0, 0, 77, 1, 0, 0, 0, 0, 79, 1, 0, 0, 0,
|
||||
0, 81, 1, 0, 0, 0, 0, 83, 1, 0, 0, 0, 0, 85, 1, 0, 0, 0, 0, 87, 1, 0, 0,
|
||||
0, 0, 89, 1, 0, 0, 0, 0, 91, 1, 0, 0, 0, 0, 93, 1, 0, 0, 0, 0, 95, 1, 0,
|
||||
0, 0, 0, 97, 1, 0, 0, 0, 0, 99, 1, 0, 0, 0, 0, 101, 1, 0, 0, 0, 0, 103,
|
||||
1, 0, 0, 0, 0, 105, 1, 0, 0, 0, 0, 107, 1, 0, 0, 0, 0, 109, 1, 0, 0, 0,
|
||||
0, 111, 1, 0, 0, 0, 0, 113, 1, 0, 0, 0, 0, 115, 1, 0, 0, 0, 0, 117, 1,
|
||||
0, 0, 0, 0, 119, 1, 0, 0, 0, 0, 121, 1, 0, 0, 0, 0, 173, 1, 0, 0, 0, 0,
|
||||
175, 1, 0, 0, 0, 1, 177, 1, 0, 0, 0, 3, 179, 1, 0, 0, 0, 5, 181, 1, 0,
|
||||
0, 0, 7, 183, 1, 0, 0, 0, 9, 185, 1, 0, 0, 0, 11, 187, 1, 0, 0, 0, 13,
|
||||
189, 1, 0, 0, 0, 15, 191, 1, 0, 0, 0, 17, 193, 1, 0, 0, 0, 19, 196, 1,
|
||||
0, 0, 0, 21, 198, 1, 0, 0, 0, 23, 201, 1, 0, 0, 0, 25, 204, 1, 0, 0, 0,
|
||||
27, 215, 1, 0, 0, 0, 29, 229, 1, 0, 0, 0, 31, 251, 1, 0, 0, 0, 33, 277,
|
||||
1, 0, 0, 0, 35, 305, 1, 0, 0, 0, 37, 323, 1, 0, 0, 0, 39, 331, 1, 0, 0,
|
||||
0, 41, 333, 1, 0, 0, 0, 43, 335, 1, 0, 0, 0, 45, 337, 1, 0, 0, 0, 47, 339,
|
||||
1, 0, 0, 0, 49, 341, 1, 0, 0, 0, 51, 343, 1, 0, 0, 0, 53, 346, 1, 0, 0,
|
||||
0, 55, 349, 1, 0, 0, 0, 57, 352, 1, 0, 0, 0, 59, 354, 1, 0, 0, 0, 61, 356,
|
||||
1, 0, 0, 0, 63, 366, 1, 0, 0, 0, 65, 374, 1, 0, 0, 0, 67, 390, 1, 0, 0,
|
||||
0, 69, 414, 1, 0, 0, 0, 71, 416, 1, 0, 0, 0, 73, 425, 1, 0, 0, 0, 75, 431,
|
||||
1, 0, 0, 0, 77, 433, 1, 0, 0, 0, 79, 469, 1, 0, 0, 0, 81, 505, 1, 0, 0,
|
||||
0, 83, 541, 1, 0, 0, 0, 85, 571, 1, 0, 0, 0, 87, 609, 1, 0, 0, 0, 89, 647,
|
||||
1, 0, 0, 0, 91, 673, 1, 0, 0, 0, 93, 693, 1, 0, 0, 0, 95, 715, 1, 0, 0,
|
||||
0, 97, 739, 1, 0, 0, 0, 99, 761, 1, 0, 0, 0, 101, 785, 1, 0, 0, 0, 103,
|
||||
813, 1, 0, 0, 0, 105, 833, 1, 0, 0, 0, 107, 855, 1, 0, 0, 0, 109, 884,
|
||||
1, 0, 0, 0, 111, 890, 1, 0, 0, 0, 113, 894, 1, 0, 0, 0, 115, 896, 1, 0,
|
||||
0, 0, 117, 904, 1, 0, 0, 0, 119, 911, 1, 0, 0, 0, 121, 927, 1, 0, 0, 0,
|
||||
123, 943, 1, 0, 0, 0, 125, 946, 1, 0, 0, 0, 127, 951, 1, 0, 0, 0, 129,
|
||||
962, 1, 0, 0, 0, 131, 971, 1, 0, 0, 0, 133, 973, 1, 0, 0, 0, 135, 975,
|
||||
1, 0, 0, 0, 137, 977, 1, 0, 0, 0, 139, 992, 1, 0, 0, 0, 141, 994, 1, 0,
|
||||
0, 0, 143, 1001, 1, 0, 0, 0, 145, 1005, 1, 0, 0, 0, 147, 1007, 1, 0, 0,
|
||||
0, 149, 1009, 1, 0, 0, 0, 151, 1011, 1, 0, 0, 0, 153, 1026, 1, 0, 0, 0,
|
||||
155, 1035, 1, 0, 0, 0, 157, 1037, 1, 0, 0, 0, 159, 1053, 1, 0, 0, 0, 161,
|
||||
1055, 1, 0, 0, 0, 163, 1062, 1, 0, 0, 0, 165, 1074, 1, 0, 0, 0, 167, 1077,
|
||||
1, 0, 0, 0, 169, 1081, 1, 0, 0, 0, 171, 1102, 1, 0, 0, 0, 173, 1105, 1,
|
||||
0, 0, 0, 175, 1116, 1, 0, 0, 0, 177, 178, 5, 40, 0, 0, 178, 2, 1, 0, 0,
|
||||
0, 179, 180, 5, 41, 0, 0, 180, 4, 1, 0, 0, 0, 181, 182, 5, 91, 0, 0, 182,
|
||||
6, 1, 0, 0, 0, 183, 184, 5, 44, 0, 0, 184, 8, 1, 0, 0, 0, 185, 186, 5,
|
||||
93, 0, 0, 186, 10, 1, 0, 0, 0, 187, 188, 5, 123, 0, 0, 188, 12, 1, 0, 0,
|
||||
0, 189, 190, 5, 125, 0, 0, 190, 14, 1, 0, 0, 0, 191, 192, 5, 60, 0, 0,
|
||||
192, 16, 1, 0, 0, 0, 193, 194, 5, 60, 0, 0, 194, 195, 5, 61, 0, 0, 195,
|
||||
18, 1, 0, 0, 0, 196, 197, 5, 62, 0, 0, 197, 20, 1, 0, 0, 0, 198, 199, 5,
|
||||
62, 0, 0, 199, 200, 5, 61, 0, 0, 200, 22, 1, 0, 0, 0, 201, 202, 5, 61,
|
||||
0, 0, 202, 203, 5, 61, 0, 0, 203, 24, 1, 0, 0, 0, 204, 205, 5, 33, 0, 0,
|
||||
205, 206, 5, 61, 0, 0, 206, 26, 1, 0, 0, 0, 207, 208, 5, 108, 0, 0, 208,
|
||||
209, 5, 105, 0, 0, 209, 210, 5, 107, 0, 0, 210, 216, 5, 101, 0, 0, 211,
|
||||
212, 5, 76, 0, 0, 212, 213, 5, 73, 0, 0, 213, 214, 5, 75, 0, 0, 214, 216,
|
||||
5, 69, 0, 0, 215, 207, 1, 0, 0, 0, 215, 211, 1, 0, 0, 0, 216, 28, 1, 0,
|
||||
0, 0, 217, 218, 5, 101, 0, 0, 218, 219, 5, 120, 0, 0, 219, 220, 5, 105,
|
||||
0, 0, 220, 221, 5, 115, 0, 0, 221, 222, 5, 116, 0, 0, 222, 230, 5, 115,
|
||||
0, 0, 223, 224, 5, 69, 0, 0, 224, 225, 5, 88, 0, 0, 225, 226, 5, 73, 0,
|
||||
0, 226, 227, 5, 83, 0, 0, 227, 228, 5, 84, 0, 0, 228, 230, 5, 83, 0, 0,
|
||||
229, 217, 1, 0, 0, 0, 229, 223, 1, 0, 0, 0, 230, 30, 1, 0, 0, 0, 231, 232,
|
||||
5, 116, 0, 0, 232, 233, 5, 101, 0, 0, 233, 234, 5, 120, 0, 0, 234, 235,
|
||||
5, 116, 0, 0, 235, 236, 5, 95, 0, 0, 236, 237, 5, 109, 0, 0, 237, 238,
|
||||
5, 97, 0, 0, 238, 239, 5, 116, 0, 0, 239, 240, 5, 99, 0, 0, 240, 252, 5,
|
||||
104, 0, 0, 241, 242, 5, 84, 0, 0, 242, 243, 5, 69, 0, 0, 243, 244, 5, 88,
|
||||
0, 0, 244, 245, 5, 84, 0, 0, 245, 246, 5, 95, 0, 0, 246, 247, 5, 77, 0,
|
||||
0, 247, 248, 5, 65, 0, 0, 248, 249, 5, 84, 0, 0, 249, 250, 5, 67, 0, 0,
|
||||
250, 252, 5, 72, 0, 0, 251, 231, 1, 0, 0, 0, 251, 241, 1, 0, 0, 0, 252,
|
||||
32, 1, 0, 0, 0, 253, 254, 5, 112, 0, 0, 254, 255, 5, 104, 0, 0, 255, 256,
|
||||
5, 114, 0, 0, 256, 257, 5, 97, 0, 0, 257, 258, 5, 115, 0, 0, 258, 259,
|
||||
5, 101, 0, 0, 259, 260, 5, 95, 0, 0, 260, 261, 5, 109, 0, 0, 261, 262,
|
||||
5, 97, 0, 0, 262, 263, 5, 116, 0, 0, 263, 264, 5, 99, 0, 0, 264, 278, 5,
|
||||
104, 0, 0, 265, 266, 5, 80, 0, 0, 266, 267, 5, 72, 0, 0, 267, 268, 5, 82,
|
||||
0, 0, 268, 269, 5, 65, 0, 0, 269, 270, 5, 83, 0, 0, 270, 271, 5, 69, 0,
|
||||
0, 271, 272, 5, 95, 0, 0, 272, 273, 5, 77, 0, 0, 273, 274, 5, 65, 0, 0,
|
||||
274, 275, 5, 84, 0, 0, 275, 276, 5, 67, 0, 0, 276, 278, 5, 72, 0, 0, 277,
|
||||
253, 1, 0, 0, 0, 277, 265, 1, 0, 0, 0, 278, 34, 1, 0, 0, 0, 279, 280, 5,
|
||||
114, 0, 0, 280, 281, 5, 97, 0, 0, 281, 282, 5, 110, 0, 0, 282, 283, 5,
|
||||
100, 0, 0, 283, 284, 5, 111, 0, 0, 284, 285, 5, 109, 0, 0, 285, 286, 5,
|
||||
95, 0, 0, 286, 287, 5, 115, 0, 0, 287, 288, 5, 97, 0, 0, 288, 289, 5, 109,
|
||||
0, 0, 289, 290, 5, 112, 0, 0, 290, 291, 5, 108, 0, 0, 291, 306, 5, 101,
|
||||
0, 0, 292, 293, 5, 82, 0, 0, 293, 294, 5, 65, 0, 0, 294, 295, 5, 78, 0,
|
||||
0, 295, 296, 5, 68, 0, 0, 296, 297, 5, 79, 0, 0, 297, 298, 5, 77, 0, 0,
|
||||
298, 299, 5, 95, 0, 0, 299, 300, 5, 83, 0, 0, 300, 301, 5, 65, 0, 0, 301,
|
||||
302, 5, 77, 0, 0, 302, 303, 5, 80, 0, 0, 303, 304, 5, 76, 0, 0, 304, 306,
|
||||
5, 69, 0, 0, 305, 279, 1, 0, 0, 0, 305, 292, 1, 0, 0, 0, 306, 36, 1, 0,
|
||||
0, 0, 307, 308, 5, 105, 0, 0, 308, 309, 5, 110, 0, 0, 309, 310, 5, 116,
|
||||
0, 0, 310, 311, 5, 101, 0, 0, 311, 312, 5, 114, 0, 0, 312, 313, 5, 118,
|
||||
0, 0, 313, 314, 5, 97, 0, 0, 314, 324, 5, 108, 0, 0, 315, 316, 5, 73, 0,
|
||||
0, 316, 317, 5, 78, 0, 0, 317, 318, 5, 84, 0, 0, 318, 319, 5, 69, 0, 0,
|
||||
319, 320, 5, 82, 0, 0, 320, 321, 5, 86, 0, 0, 321, 322, 5, 65, 0, 0, 322,
|
||||
324, 5, 76, 0, 0, 323, 307, 1, 0, 0, 0, 323, 315, 1, 0, 0, 0, 324, 38,
|
||||
1, 0, 0, 0, 325, 326, 5, 105, 0, 0, 326, 327, 5, 115, 0, 0, 327, 332, 5,
|
||||
111, 0, 0, 328, 329, 5, 73, 0, 0, 329, 330, 5, 83, 0, 0, 330, 332, 5, 79,
|
||||
0, 0, 331, 325, 1, 0, 0, 0, 331, 328, 1, 0, 0, 0, 332, 40, 1, 0, 0, 0,
|
||||
333, 334, 5, 43, 0, 0, 334, 42, 1, 0, 0, 0, 335, 336, 5, 45, 0, 0, 336,
|
||||
44, 1, 0, 0, 0, 337, 338, 5, 42, 0, 0, 338, 46, 1, 0, 0, 0, 339, 340, 5,
|
||||
47, 0, 0, 340, 48, 1, 0, 0, 0, 341, 342, 5, 37, 0, 0, 342, 50, 1, 0, 0,
|
||||
0, 343, 344, 5, 42, 0, 0, 344, 345, 5, 42, 0, 0, 345, 52, 1, 0, 0, 0, 346,
|
||||
347, 5, 60, 0, 0, 347, 348, 5, 60, 0, 0, 348, 54, 1, 0, 0, 0, 349, 350,
|
||||
5, 62, 0, 0, 350, 351, 5, 62, 0, 0, 351, 56, 1, 0, 0, 0, 352, 353, 5, 38,
|
||||
0, 0, 353, 58, 1, 0, 0, 0, 354, 355, 5, 124, 0, 0, 355, 60, 1, 0, 0, 0,
|
||||
356, 357, 5, 94, 0, 0, 357, 62, 1, 0, 0, 0, 358, 359, 5, 38, 0, 0, 359,
|
||||
367, 5, 38, 0, 0, 360, 361, 5, 97, 0, 0, 361, 362, 5, 110, 0, 0, 362, 367,
|
||||
5, 100, 0, 0, 363, 364, 5, 65, 0, 0, 364, 365, 5, 78, 0, 0, 365, 367, 5,
|
||||
68, 0, 0, 366, 358, 1, 0, 0, 0, 366, 360, 1, 0, 0, 0, 366, 363, 1, 0, 0,
|
||||
0, 367, 64, 1, 0, 0, 0, 368, 369, 5, 124, 0, 0, 369, 375, 5, 124, 0, 0,
|
||||
370, 371, 5, 111, 0, 0, 371, 375, 5, 114, 0, 0, 372, 373, 5, 79, 0, 0,
|
||||
373, 375, 5, 82, 0, 0, 374, 368, 1, 0, 0, 0, 374, 370, 1, 0, 0, 0, 374,
|
||||
372, 1, 0, 0, 0, 375, 66, 1, 0, 0, 0, 376, 377, 5, 105, 0, 0, 377, 378,
|
||||
5, 115, 0, 0, 378, 379, 5, 32, 0, 0, 379, 380, 5, 110, 0, 0, 380, 381,
|
||||
5, 117, 0, 0, 381, 382, 5, 108, 0, 0, 382, 391, 5, 108, 0, 0, 383, 384,
|
||||
5, 73, 0, 0, 384, 385, 5, 83, 0, 0, 385, 386, 5, 32, 0, 0, 386, 387, 5,
|
||||
78, 0, 0, 387, 388, 5, 85, 0, 0, 388, 389, 5, 76, 0, 0, 389, 391, 5, 76,
|
||||
0, 0, 390, 376, 1, 0, 0, 0, 390, 383, 1, 0, 0, 0, 391, 68, 1, 0, 0, 0,
|
||||
392, 393, 5, 105, 0, 0, 393, 394, 5, 115, 0, 0, 394, 395, 5, 32, 0, 0,
|
||||
395, 396, 5, 110, 0, 0, 396, 397, 5, 111, 0, 0, 397, 398, 5, 116, 0, 0,
|
||||
398, 399, 5, 32, 0, 0, 399, 400, 5, 110, 0, 0, 400, 401, 5, 117, 0, 0,
|
||||
401, 402, 5, 108, 0, 0, 402, 415, 5, 108, 0, 0, 403, 404, 5, 73, 0, 0,
|
||||
404, 405, 5, 83, 0, 0, 405, 406, 5, 32, 0, 0, 406, 407, 5, 78, 0, 0, 407,
|
||||
408, 5, 79, 0, 0, 408, 409, 5, 84, 0, 0, 409, 410, 5, 32, 0, 0, 410, 411,
|
||||
5, 78, 0, 0, 411, 412, 5, 85, 0, 0, 412, 413, 5, 76, 0, 0, 413, 415, 5,
|
||||
76, 0, 0, 414, 392, 1, 0, 0, 0, 414, 403, 1, 0, 0, 0, 415, 70, 1, 0, 0,
|
||||
0, 416, 417, 5, 126, 0, 0, 417, 72, 1, 0, 0, 0, 418, 426, 5, 33, 0, 0,
|
||||
419, 420, 5, 110, 0, 0, 420, 421, 5, 111, 0, 0, 421, 426, 5, 116, 0, 0,
|
||||
422, 423, 5, 78, 0, 0, 423, 424, 5, 79, 0, 0, 424, 426, 5, 84, 0, 0, 425,
|
||||
418, 1, 0, 0, 0, 425, 419, 1, 0, 0, 0, 425, 422, 1, 0, 0, 0, 426, 74, 1,
|
||||
0, 0, 0, 427, 428, 5, 105, 0, 0, 428, 432, 5, 110, 0, 0, 429, 430, 5, 73,
|
||||
0, 0, 430, 432, 5, 78, 0, 0, 431, 427, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0,
|
||||
432, 76, 1, 0, 0, 0, 433, 438, 5, 91, 0, 0, 434, 437, 3, 173, 86, 0, 435,
|
||||
437, 3, 175, 87, 0, 436, 434, 1, 0, 0, 0, 436, 435, 1, 0, 0, 0, 437, 440,
|
||||
1, 0, 0, 0, 438, 436, 1, 0, 0, 0, 438, 439, 1, 0, 0, 0, 439, 441, 1, 0,
|
||||
0, 0, 440, 438, 1, 0, 0, 0, 441, 442, 5, 93, 0, 0, 442, 78, 1, 0, 0, 0,
|
||||
443, 444, 5, 106, 0, 0, 444, 445, 5, 115, 0, 0, 445, 446, 5, 111, 0, 0,
|
||||
446, 447, 5, 110, 0, 0, 447, 448, 5, 95, 0, 0, 448, 449, 5, 99, 0, 0, 449,
|
||||
450, 5, 111, 0, 0, 450, 451, 5, 110, 0, 0, 451, 452, 5, 116, 0, 0, 452,
|
||||
453, 5, 97, 0, 0, 453, 454, 5, 105, 0, 0, 454, 455, 5, 110, 0, 0, 455,
|
||||
470, 5, 115, 0, 0, 456, 457, 5, 74, 0, 0, 457, 458, 5, 83, 0, 0, 458, 459,
|
||||
5, 79, 0, 0, 459, 460, 5, 78, 0, 0, 460, 461, 5, 95, 0, 0, 461, 462, 5,
|
||||
67, 0, 0, 462, 463, 5, 79, 0, 0, 463, 464, 5, 78, 0, 0, 464, 465, 5, 84,
|
||||
0, 0, 465, 466, 5, 65, 0, 0, 466, 467, 5, 73, 0, 0, 467, 468, 5, 78, 0,
|
||||
0, 468, 470, 5, 83, 0, 0, 469, 443, 1, 0, 0, 0, 469, 456, 1, 0, 0, 0, 470,
|
||||
80, 1, 0, 0, 0, 471, 472, 5, 106, 0, 0, 472, 473, 5, 115, 0, 0, 473, 474,
|
||||
5, 111, 0, 0, 474, 475, 5, 110, 0, 0, 475, 476, 5, 95, 0, 0, 476, 477,
|
||||
5, 99, 0, 0, 477, 478, 5, 111, 0, 0, 478, 479, 5, 110, 0, 0, 479, 480,
|
||||
5, 116, 0, 0, 480, 481, 5, 97, 0, 0, 481, 482, 5, 105, 0, 0, 482, 483,
|
||||
5, 110, 0, 0, 483, 484, 5, 115, 0, 0, 484, 485, 5, 95, 0, 0, 485, 486,
|
||||
5, 97, 0, 0, 486, 487, 5, 108, 0, 0, 487, 506, 5, 108, 0, 0, 488, 489,
|
||||
5, 74, 0, 0, 489, 490, 5, 83, 0, 0, 490, 491, 5, 79, 0, 0, 491, 492, 5,
|
||||
78, 0, 0, 492, 493, 5, 95, 0, 0, 493, 494, 5, 67, 0, 0, 494, 495, 5, 79,
|
||||
0, 0, 495, 496, 5, 78, 0, 0, 496, 497, 5, 84, 0, 0, 497, 498, 5, 65, 0,
|
||||
0, 498, 499, 5, 73, 0, 0, 499, 500, 5, 78, 0, 0, 500, 501, 5, 83, 0, 0,
|
||||
501, 502, 5, 95, 0, 0, 502, 503, 5, 65, 0, 0, 503, 504, 5, 76, 0, 0, 504,
|
||||
506, 5, 76, 0, 0, 505, 471, 1, 0, 0, 0, 505, 488, 1, 0, 0, 0, 506, 82,
|
||||
1, 0, 0, 0, 507, 508, 5, 106, 0, 0, 508, 509, 5, 115, 0, 0, 509, 510, 5,
|
||||
111, 0, 0, 510, 511, 5, 110, 0, 0, 511, 512, 5, 95, 0, 0, 512, 513, 5,
|
||||
99, 0, 0, 513, 514, 5, 111, 0, 0, 514, 515, 5, 110, 0, 0, 515, 516, 5,
|
||||
116, 0, 0, 516, 517, 5, 97, 0, 0, 517, 518, 5, 105, 0, 0, 518, 519, 5,
|
||||
110, 0, 0, 519, 520, 5, 115, 0, 0, 520, 521, 5, 95, 0, 0, 521, 522, 5,
|
||||
97, 0, 0, 522, 523, 5, 110, 0, 0, 523, 542, 5, 121, 0, 0, 524, 525, 5,
|
||||
74, 0, 0, 525, 526, 5, 83, 0, 0, 526, 527, 5, 79, 0, 0, 527, 528, 5, 78,
|
||||
0, 0, 528, 529, 5, 95, 0, 0, 529, 530, 5, 67, 0, 0, 530, 531, 5, 79, 0,
|
||||
0, 531, 532, 5, 78, 0, 0, 532, 533, 5, 84, 0, 0, 533, 534, 5, 65, 0, 0,
|
||||
534, 535, 5, 73, 0, 0, 535, 536, 5, 78, 0, 0, 536, 537, 5, 83, 0, 0, 537,
|
||||
538, 5, 95, 0, 0, 538, 539, 5, 65, 0, 0, 539, 540, 5, 78, 0, 0, 540, 542,
|
||||
5, 89, 0, 0, 541, 507, 1, 0, 0, 0, 541, 524, 1, 0, 0, 0, 542, 84, 1, 0,
|
||||
0, 0, 543, 544, 5, 97, 0, 0, 544, 545, 5, 114, 0, 0, 545, 546, 5, 114,
|
||||
0, 0, 546, 547, 5, 97, 0, 0, 547, 548, 5, 121, 0, 0, 548, 549, 5, 95, 0,
|
||||
0, 549, 550, 5, 99, 0, 0, 550, 551, 5, 111, 0, 0, 551, 552, 5, 110, 0,
|
||||
0, 552, 553, 5, 116, 0, 0, 553, 554, 5, 97, 0, 0, 554, 555, 5, 105, 0,
|
||||
0, 555, 556, 5, 110, 0, 0, 556, 572, 5, 115, 0, 0, 557, 558, 5, 65, 0,
|
||||
0, 558, 559, 5, 82, 0, 0, 559, 560, 5, 82, 0, 0, 560, 561, 5, 65, 0, 0,
|
||||
561, 562, 5, 89, 0, 0, 562, 563, 5, 95, 0, 0, 563, 564, 5, 67, 0, 0, 564,
|
||||
565, 5, 79, 0, 0, 565, 566, 5, 78, 0, 0, 566, 567, 5, 84, 0, 0, 567, 568,
|
||||
5, 65, 0, 0, 568, 569, 5, 73, 0, 0, 569, 570, 5, 78, 0, 0, 570, 572, 5,
|
||||
83, 0, 0, 571, 543, 1, 0, 0, 0, 571, 557, 1, 0, 0, 0, 572, 86, 1, 0, 0,
|
||||
0, 573, 574, 5, 97, 0, 0, 574, 575, 5, 114, 0, 0, 575, 576, 5, 114, 0,
|
||||
0, 576, 577, 5, 97, 0, 0, 577, 578, 5, 121, 0, 0, 578, 579, 5, 95, 0, 0,
|
||||
579, 580, 5, 99, 0, 0, 580, 581, 5, 111, 0, 0, 581, 582, 5, 110, 0, 0,
|
||||
582, 583, 5, 116, 0, 0, 583, 584, 5, 97, 0, 0, 584, 585, 5, 105, 0, 0,
|
||||
585, 586, 5, 110, 0, 0, 586, 587, 5, 115, 0, 0, 587, 588, 5, 95, 0, 0,
|
||||
588, 589, 5, 97, 0, 0, 589, 590, 5, 108, 0, 0, 590, 610, 5, 108, 0, 0,
|
||||
591, 592, 5, 65, 0, 0, 592, 593, 5, 82, 0, 0, 593, 594, 5, 82, 0, 0, 594,
|
||||
595, 5, 65, 0, 0, 595, 596, 5, 89, 0, 0, 596, 597, 5, 95, 0, 0, 597, 598,
|
||||
5, 67, 0, 0, 598, 599, 5, 79, 0, 0, 599, 600, 5, 78, 0, 0, 600, 601, 5,
|
||||
84, 0, 0, 601, 602, 5, 65, 0, 0, 602, 603, 5, 73, 0, 0, 603, 604, 5, 78,
|
||||
0, 0, 604, 605, 5, 83, 0, 0, 605, 606, 5, 95, 0, 0, 606, 607, 5, 65, 0,
|
||||
0, 607, 608, 5, 76, 0, 0, 608, 610, 5, 76, 0, 0, 609, 573, 1, 0, 0, 0,
|
||||
609, 591, 1, 0, 0, 0, 610, 88, 1, 0, 0, 0, 611, 612, 5, 97, 0, 0, 612,
|
||||
613, 5, 114, 0, 0, 613, 614, 5, 114, 0, 0, 614, 615, 5, 97, 0, 0, 615,
|
||||
616, 5, 121, 0, 0, 616, 617, 5, 95, 0, 0, 617, 618, 5, 99, 0, 0, 618, 619,
|
||||
5, 111, 0, 0, 619, 620, 5, 110, 0, 0, 620, 621, 5, 116, 0, 0, 621, 622,
|
||||
5, 97, 0, 0, 622, 623, 5, 105, 0, 0, 623, 624, 5, 110, 0, 0, 624, 625,
|
||||
5, 115, 0, 0, 625, 626, 5, 95, 0, 0, 626, 627, 5, 97, 0, 0, 627, 628, 5,
|
||||
110, 0, 0, 628, 648, 5, 121, 0, 0, 629, 630, 5, 65, 0, 0, 630, 631, 5,
|
||||
82, 0, 0, 631, 632, 5, 82, 0, 0, 632, 633, 5, 65, 0, 0, 633, 634, 5, 89,
|
||||
0, 0, 634, 635, 5, 95, 0, 0, 635, 636, 5, 67, 0, 0, 636, 637, 5, 79, 0,
|
||||
0, 637, 638, 5, 78, 0, 0, 638, 639, 5, 84, 0, 0, 639, 640, 5, 65, 0, 0,
|
||||
640, 641, 5, 73, 0, 0, 641, 642, 5, 78, 0, 0, 642, 643, 5, 83, 0, 0, 643,
|
||||
644, 5, 95, 0, 0, 644, 645, 5, 65, 0, 0, 645, 646, 5, 78, 0, 0, 646, 648,
|
||||
5, 89, 0, 0, 647, 611, 1, 0, 0, 0, 647, 629, 1, 0, 0, 0, 648, 90, 1, 0,
|
||||
0, 0, 649, 650, 5, 97, 0, 0, 650, 651, 5, 114, 0, 0, 651, 652, 5, 114,
|
||||
0, 0, 652, 653, 5, 97, 0, 0, 653, 654, 5, 121, 0, 0, 654, 655, 5, 95, 0,
|
||||
0, 655, 656, 5, 108, 0, 0, 656, 657, 5, 101, 0, 0, 657, 658, 5, 110, 0,
|
||||
0, 658, 659, 5, 103, 0, 0, 659, 660, 5, 116, 0, 0, 660, 674, 5, 104, 0,
|
||||
0, 661, 662, 5, 65, 0, 0, 662, 663, 5, 82, 0, 0, 663, 664, 5, 82, 0, 0,
|
||||
664, 665, 5, 65, 0, 0, 665, 666, 5, 89, 0, 0, 666, 667, 5, 95, 0, 0, 667,
|
||||
668, 5, 76, 0, 0, 668, 669, 5, 69, 0, 0, 669, 670, 5, 78, 0, 0, 670, 671,
|
||||
5, 71, 0, 0, 671, 672, 5, 84, 0, 0, 672, 674, 5, 72, 0, 0, 673, 649, 1,
|
||||
0, 0, 0, 673, 661, 1, 0, 0, 0, 674, 92, 1, 0, 0, 0, 675, 676, 5, 115, 0,
|
||||
0, 676, 677, 5, 116, 0, 0, 677, 678, 5, 95, 0, 0, 678, 679, 5, 101, 0,
|
||||
0, 679, 680, 5, 113, 0, 0, 680, 681, 5, 117, 0, 0, 681, 682, 5, 97, 0,
|
||||
0, 682, 683, 5, 108, 0, 0, 683, 694, 5, 115, 0, 0, 684, 685, 5, 83, 0,
|
||||
0, 685, 686, 5, 84, 0, 0, 686, 687, 5, 95, 0, 0, 687, 688, 5, 69, 0, 0,
|
||||
688, 689, 5, 81, 0, 0, 689, 690, 5, 85, 0, 0, 690, 691, 5, 65, 0, 0, 691,
|
||||
692, 5, 76, 0, 0, 692, 694, 5, 83, 0, 0, 693, 675, 1, 0, 0, 0, 693, 684,
|
||||
1, 0, 0, 0, 694, 94, 1, 0, 0, 0, 695, 696, 5, 115, 0, 0, 696, 697, 5, 116,
|
||||
0, 0, 697, 698, 5, 95, 0, 0, 698, 699, 5, 116, 0, 0, 699, 700, 5, 111,
|
||||
0, 0, 700, 701, 5, 117, 0, 0, 701, 702, 5, 99, 0, 0, 702, 703, 5, 104,
|
||||
0, 0, 703, 704, 5, 101, 0, 0, 704, 716, 5, 115, 0, 0, 705, 706, 5, 83,
|
||||
0, 0, 706, 707, 5, 84, 0, 0, 707, 708, 5, 95, 0, 0, 708, 709, 5, 84, 0,
|
||||
0, 709, 710, 5, 79, 0, 0, 710, 711, 5, 85, 0, 0, 711, 712, 5, 67, 0, 0,
|
||||
712, 713, 5, 72, 0, 0, 713, 714, 5, 69, 0, 0, 714, 716, 5, 83, 0, 0, 715,
|
||||
695, 1, 0, 0, 0, 715, 705, 1, 0, 0, 0, 716, 96, 1, 0, 0, 0, 717, 718, 5,
|
||||
115, 0, 0, 718, 719, 5, 116, 0, 0, 719, 720, 5, 95, 0, 0, 720, 721, 5,
|
||||
111, 0, 0, 721, 722, 5, 118, 0, 0, 722, 723, 5, 101, 0, 0, 723, 724, 5,
|
||||
114, 0, 0, 724, 725, 5, 108, 0, 0, 725, 726, 5, 97, 0, 0, 726, 727, 5,
|
||||
112, 0, 0, 727, 740, 5, 115, 0, 0, 728, 729, 5, 83, 0, 0, 729, 730, 5,
|
||||
84, 0, 0, 730, 731, 5, 95, 0, 0, 731, 732, 5, 79, 0, 0, 732, 733, 5, 86,
|
||||
0, 0, 733, 734, 5, 69, 0, 0, 734, 735, 5, 82, 0, 0, 735, 736, 5, 76, 0,
|
||||
0, 736, 737, 5, 65, 0, 0, 737, 738, 5, 80, 0, 0, 738, 740, 5, 83, 0, 0,
|
||||
739, 717, 1, 0, 0, 0, 739, 728, 1, 0, 0, 0, 740, 98, 1, 0, 0, 0, 741, 742,
|
||||
5, 115, 0, 0, 742, 743, 5, 116, 0, 0, 743, 744, 5, 95, 0, 0, 744, 745,
|
||||
5, 99, 0, 0, 745, 746, 5, 114, 0, 0, 746, 747, 5, 111, 0, 0, 747, 748,
|
||||
5, 115, 0, 0, 748, 749, 5, 115, 0, 0, 749, 750, 5, 101, 0, 0, 750, 762,
|
||||
5, 115, 0, 0, 751, 752, 5, 83, 0, 0, 752, 753, 5, 84, 0, 0, 753, 754, 5,
|
||||
95, 0, 0, 754, 755, 5, 67, 0, 0, 755, 756, 5, 82, 0, 0, 756, 757, 5, 79,
|
||||
0, 0, 757, 758, 5, 83, 0, 0, 758, 759, 5, 83, 0, 0, 759, 760, 5, 69, 0,
|
||||
0, 760, 762, 5, 83, 0, 0, 761, 741, 1, 0, 0, 0, 761, 751, 1, 0, 0, 0, 762,
|
||||
100, 1, 0, 0, 0, 763, 764, 5, 115, 0, 0, 764, 765, 5, 116, 0, 0, 765, 766,
|
||||
5, 95, 0, 0, 766, 767, 5, 99, 0, 0, 767, 768, 5, 111, 0, 0, 768, 769, 5,
|
||||
110, 0, 0, 769, 770, 5, 116, 0, 0, 770, 771, 5, 97, 0, 0, 771, 772, 5,
|
||||
105, 0, 0, 772, 773, 5, 110, 0, 0, 773, 786, 5, 115, 0, 0, 774, 775, 5,
|
||||
83, 0, 0, 775, 776, 5, 84, 0, 0, 776, 777, 5, 95, 0, 0, 777, 778, 5, 67,
|
||||
0, 0, 778, 779, 5, 79, 0, 0, 779, 780, 5, 78, 0, 0, 780, 781, 5, 84, 0,
|
||||
0, 781, 782, 5, 65, 0, 0, 782, 783, 5, 73, 0, 0, 783, 784, 5, 78, 0, 0,
|
||||
784, 786, 5, 83, 0, 0, 785, 763, 1, 0, 0, 0, 785, 774, 1, 0, 0, 0, 786,
|
||||
102, 1, 0, 0, 0, 787, 788, 5, 115, 0, 0, 788, 789, 5, 116, 0, 0, 789, 790,
|
||||
5, 95, 0, 0, 790, 791, 5, 105, 0, 0, 791, 792, 5, 110, 0, 0, 792, 793,
|
||||
5, 116, 0, 0, 793, 794, 5, 101, 0, 0, 794, 795, 5, 114, 0, 0, 795, 796,
|
||||
5, 115, 0, 0, 796, 797, 5, 101, 0, 0, 797, 798, 5, 99, 0, 0, 798, 799,
|
||||
5, 116, 0, 0, 799, 814, 5, 115, 0, 0, 800, 801, 5, 83, 0, 0, 801, 802,
|
||||
5, 84, 0, 0, 802, 803, 5, 95, 0, 0, 803, 804, 5, 73, 0, 0, 804, 805, 5,
|
||||
78, 0, 0, 805, 806, 5, 84, 0, 0, 806, 807, 5, 69, 0, 0, 807, 808, 5, 82,
|
||||
0, 0, 808, 809, 5, 83, 0, 0, 809, 810, 5, 69, 0, 0, 810, 811, 5, 67, 0,
|
||||
0, 811, 812, 5, 84, 0, 0, 812, 814, 5, 83, 0, 0, 813, 787, 1, 0, 0, 0,
|
||||
813, 800, 1, 0, 0, 0, 814, 104, 1, 0, 0, 0, 815, 816, 5, 115, 0, 0, 816,
|
||||
817, 5, 116, 0, 0, 817, 818, 5, 95, 0, 0, 818, 819, 5, 119, 0, 0, 819,
|
||||
820, 5, 105, 0, 0, 820, 821, 5, 116, 0, 0, 821, 822, 5, 104, 0, 0, 822,
|
||||
823, 5, 105, 0, 0, 823, 834, 5, 110, 0, 0, 824, 825, 5, 83, 0, 0, 825,
|
||||
826, 5, 84, 0, 0, 826, 827, 5, 95, 0, 0, 827, 828, 5, 87, 0, 0, 828, 829,
|
||||
5, 73, 0, 0, 829, 830, 5, 84, 0, 0, 830, 831, 5, 72, 0, 0, 831, 832, 5,
|
||||
73, 0, 0, 832, 834, 5, 78, 0, 0, 833, 815, 1, 0, 0, 0, 833, 824, 1, 0,
|
||||
0, 0, 834, 106, 1, 0, 0, 0, 835, 836, 5, 115, 0, 0, 836, 837, 5, 116, 0,
|
||||
0, 837, 838, 5, 95, 0, 0, 838, 839, 5, 100, 0, 0, 839, 840, 5, 119, 0,
|
||||
0, 840, 841, 5, 105, 0, 0, 841, 842, 5, 116, 0, 0, 842, 843, 5, 104, 0,
|
||||
0, 843, 844, 5, 105, 0, 0, 844, 856, 5, 110, 0, 0, 845, 846, 5, 83, 0,
|
||||
0, 846, 847, 5, 84, 0, 0, 847, 848, 5, 95, 0, 0, 848, 849, 5, 68, 0, 0,
|
||||
849, 850, 5, 87, 0, 0, 850, 851, 5, 73, 0, 0, 851, 852, 5, 84, 0, 0, 852,
|
||||
853, 5, 72, 0, 0, 853, 854, 5, 73, 0, 0, 854, 856, 5, 78, 0, 0, 855, 835,
|
||||
1, 0, 0, 0, 855, 845, 1, 0, 0, 0, 856, 108, 1, 0, 0, 0, 857, 858, 5, 116,
|
||||
0, 0, 858, 859, 5, 114, 0, 0, 859, 860, 5, 117, 0, 0, 860, 885, 5, 101,
|
||||
0, 0, 861, 862, 5, 84, 0, 0, 862, 863, 5, 114, 0, 0, 863, 864, 5, 117,
|
||||
0, 0, 864, 885, 5, 101, 0, 0, 865, 866, 5, 84, 0, 0, 866, 867, 5, 82, 0,
|
||||
0, 867, 868, 5, 85, 0, 0, 868, 885, 5, 69, 0, 0, 869, 870, 5, 102, 0, 0,
|
||||
870, 871, 5, 97, 0, 0, 871, 872, 5, 108, 0, 0, 872, 873, 5, 115, 0, 0,
|
||||
873, 885, 5, 101, 0, 0, 874, 875, 5, 70, 0, 0, 875, 876, 5, 97, 0, 0, 876,
|
||||
877, 5, 108, 0, 0, 877, 878, 5, 115, 0, 0, 878, 885, 5, 101, 0, 0, 879,
|
||||
880, 5, 70, 0, 0, 880, 881, 5, 65, 0, 0, 881, 882, 5, 76, 0, 0, 882, 883,
|
||||
5, 83, 0, 0, 883, 885, 5, 69, 0, 0, 884, 857, 1, 0, 0, 0, 884, 861, 1,
|
||||
0, 0, 0, 884, 865, 1, 0, 0, 0, 884, 869, 1, 0, 0, 0, 884, 874, 1, 0, 0,
|
||||
0, 884, 879, 1, 0, 0, 0, 885, 110, 1, 0, 0, 0, 886, 891, 3, 139, 69, 0,
|
||||
887, 891, 3, 141, 70, 0, 888, 891, 3, 143, 71, 0, 889, 891, 3, 137, 68,
|
||||
0, 890, 886, 1, 0, 0, 0, 890, 887, 1, 0, 0, 0, 890, 888, 1, 0, 0, 0, 890,
|
||||
889, 1, 0, 0, 0, 891, 112, 1, 0, 0, 0, 892, 895, 3, 155, 77, 0, 893, 895,
|
||||
3, 157, 78, 0, 894, 892, 1, 0, 0, 0, 894, 893, 1, 0, 0, 0, 895, 114, 1,
|
||||
0, 0, 0, 896, 901, 3, 133, 66, 0, 897, 900, 3, 133, 66, 0, 898, 900, 3,
|
||||
135, 67, 0, 899, 897, 1, 0, 0, 0, 899, 898, 1, 0, 0, 0, 900, 903, 1, 0,
|
||||
0, 0, 901, 899, 1, 0, 0, 0, 901, 902, 1, 0, 0, 0, 902, 116, 1, 0, 0, 0,
|
||||
903, 901, 1, 0, 0, 0, 904, 905, 5, 36, 0, 0, 905, 906, 5, 109, 0, 0, 906,
|
||||
907, 5, 101, 0, 0, 907, 908, 5, 116, 0, 0, 908, 909, 5, 97, 0, 0, 909,
|
||||
118, 1, 0, 0, 0, 910, 912, 3, 123, 61, 0, 911, 910, 1, 0, 0, 0, 911, 912,
|
||||
1, 0, 0, 0, 912, 923, 1, 0, 0, 0, 913, 915, 5, 34, 0, 0, 914, 916, 3, 125,
|
||||
62, 0, 915, 914, 1, 0, 0, 0, 915, 916, 1, 0, 0, 0, 916, 917, 1, 0, 0, 0,
|
||||
917, 924, 5, 34, 0, 0, 918, 920, 5, 39, 0, 0, 919, 921, 3, 127, 63, 0,
|
||||
920, 919, 1, 0, 0, 0, 920, 921, 1, 0, 0, 0, 921, 922, 1, 0, 0, 0, 922,
|
||||
924, 5, 39, 0, 0, 923, 913, 1, 0, 0, 0, 923, 918, 1, 0, 0, 0, 924, 120,
|
||||
1, 0, 0, 0, 925, 928, 3, 115, 57, 0, 926, 928, 3, 117, 58, 0, 927, 925,
|
||||
1, 0, 0, 0, 927, 926, 1, 0, 0, 0, 928, 936, 1, 0, 0, 0, 929, 932, 5, 91,
|
||||
0, 0, 930, 933, 3, 119, 59, 0, 931, 933, 3, 139, 69, 0, 932, 930, 1, 0,
|
||||
0, 0, 932, 931, 1, 0, 0, 0, 933, 934, 1, 0, 0, 0, 934, 935, 5, 93, 0, 0,
|
||||
935, 937, 1, 0, 0, 0, 936, 929, 1, 0, 0, 0, 937, 938, 1, 0, 0, 0, 938,
|
||||
936, 1, 0, 0, 0, 938, 939, 1, 0, 0, 0, 939, 122, 1, 0, 0, 0, 940, 941,
|
||||
5, 117, 0, 0, 941, 944, 5, 56, 0, 0, 942, 944, 7, 0, 0, 0, 943, 940, 1,
|
||||
0, 0, 0, 943, 942, 1, 0, 0, 0, 944, 124, 1, 0, 0, 0, 945, 947, 3, 129,
|
||||
64, 0, 946, 945, 1, 0, 0, 0, 947, 948, 1, 0, 0, 0, 948, 946, 1, 0, 0, 0,
|
||||
948, 949, 1, 0, 0, 0, 949, 126, 1, 0, 0, 0, 950, 952, 3, 131, 65, 0, 951,
|
||||
950, 1, 0, 0, 0, 952, 953, 1, 0, 0, 0, 953, 951, 1, 0, 0, 0, 953, 954,
|
||||
1, 0, 0, 0, 954, 128, 1, 0, 0, 0, 955, 963, 8, 1, 0, 0, 956, 963, 3, 171,
|
||||
85, 0, 957, 958, 5, 92, 0, 0, 958, 963, 5, 10, 0, 0, 959, 960, 5, 92, 0,
|
||||
0, 960, 961, 5, 13, 0, 0, 961, 963, 5, 10, 0, 0, 962, 955, 1, 0, 0, 0,
|
||||
962, 956, 1, 0, 0, 0, 962, 957, 1, 0, 0, 0, 962, 959, 1, 0, 0, 0, 963,
|
||||
130, 1, 0, 0, 0, 964, 972, 8, 2, 0, 0, 965, 972, 3, 171, 85, 0, 966, 967,
|
||||
5, 92, 0, 0, 967, 972, 5, 10, 0, 0, 968, 969, 5, 92, 0, 0, 969, 970, 5,
|
||||
13, 0, 0, 970, 972, 5, 10, 0, 0, 971, 964, 1, 0, 0, 0, 971, 965, 1, 0,
|
||||
0, 0, 971, 966, 1, 0, 0, 0, 971, 968, 1, 0, 0, 0, 972, 132, 1, 0, 0, 0,
|
||||
973, 974, 7, 3, 0, 0, 974, 134, 1, 0, 0, 0, 975, 976, 7, 4, 0, 0, 976,
|
||||
136, 1, 0, 0, 0, 977, 978, 5, 48, 0, 0, 978, 980, 7, 5, 0, 0, 979, 981,
|
||||
7, 6, 0, 0, 980, 979, 1, 0, 0, 0, 981, 982, 1, 0, 0, 0, 982, 980, 1, 0,
|
||||
0, 0, 982, 983, 1, 0, 0, 0, 983, 138, 1, 0, 0, 0, 984, 988, 3, 145, 72,
|
||||
0, 985, 987, 3, 135, 67, 0, 986, 985, 1, 0, 0, 0, 987, 990, 1, 0, 0, 0,
|
||||
988, 986, 1, 0, 0, 0, 988, 989, 1, 0, 0, 0, 989, 993, 1, 0, 0, 0, 990,
|
||||
988, 1, 0, 0, 0, 991, 993, 5, 48, 0, 0, 992, 984, 1, 0, 0, 0, 992, 991,
|
||||
1, 0, 0, 0, 993, 140, 1, 0, 0, 0, 994, 998, 5, 48, 0, 0, 995, 997, 3, 147,
|
||||
73, 0, 996, 995, 1, 0, 0, 0, 997, 1000, 1, 0, 0, 0, 998, 996, 1, 0, 0,
|
||||
0, 998, 999, 1, 0, 0, 0, 999, 142, 1, 0, 0, 0, 1000, 998, 1, 0, 0, 0, 1001,
|
||||
1002, 5, 48, 0, 0, 1002, 1003, 7, 7, 0, 0, 1003, 1004, 3, 167, 83, 0, 1004,
|
||||
144, 1, 0, 0, 0, 1005, 1006, 7, 8, 0, 0, 1006, 146, 1, 0, 0, 0, 1007, 1008,
|
||||
7, 9, 0, 0, 1008, 148, 1, 0, 0, 0, 1009, 1010, 7, 10, 0, 0, 1010, 150,
|
||||
1, 0, 0, 0, 1011, 1012, 3, 149, 74, 0, 1012, 1013, 3, 149, 74, 0, 1013,
|
||||
1014, 3, 149, 74, 0, 1014, 1015, 3, 149, 74, 0, 1015, 152, 1, 0, 0, 0,
|
||||
1016, 1017, 5, 92, 0, 0, 1017, 1018, 5, 117, 0, 0, 1018, 1019, 1, 0, 0,
|
||||
0, 1019, 1027, 3, 151, 75, 0, 1020, 1021, 5, 92, 0, 0, 1021, 1022, 5, 85,
|
||||
0, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1024, 3, 151, 75, 0, 1024, 1025, 3,
|
||||
151, 75, 0, 1025, 1027, 1, 0, 0, 0, 1026, 1016, 1, 0, 0, 0, 1026, 1020,
|
||||
1, 0, 0, 0, 1027, 154, 1, 0, 0, 0, 1028, 1030, 3, 159, 79, 0, 1029, 1031,
|
||||
3, 161, 80, 0, 1030, 1029, 1, 0, 0, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1036,
|
||||
1, 0, 0, 0, 1032, 1033, 3, 163, 81, 0, 1033, 1034, 3, 161, 80, 0, 1034,
|
||||
1036, 1, 0, 0, 0, 1035, 1028, 1, 0, 0, 0, 1035, 1032, 1, 0, 0, 0, 1036,
|
||||
156, 1, 0, 0, 0, 1037, 1038, 5, 48, 0, 0, 1038, 1041, 7, 7, 0, 0, 1039,
|
||||
1042, 3, 165, 82, 0, 1040, 1042, 3, 167, 83, 0, 1041, 1039, 1, 0, 0, 0,
|
||||
1041, 1040, 1, 0, 0, 0, 1042, 1043, 1, 0, 0, 0, 1043, 1044, 3, 169, 84,
|
||||
0, 1044, 158, 1, 0, 0, 0, 1045, 1047, 3, 163, 81, 0, 1046, 1045, 1, 0,
|
||||
0, 0, 1046, 1047, 1, 0, 0, 0, 1047, 1048, 1, 0, 0, 0, 1048, 1049, 5, 46,
|
||||
0, 0, 1049, 1054, 3, 163, 81, 0, 1050, 1051, 3, 163, 81, 0, 1051, 1052,
|
||||
5, 46, 0, 0, 1052, 1054, 1, 0, 0, 0, 1053, 1046, 1, 0, 0, 0, 1053, 1050,
|
||||
1, 0, 0, 0, 1054, 160, 1, 0, 0, 0, 1055, 1057, 7, 11, 0, 0, 1056, 1058,
|
||||
7, 12, 0, 0, 1057, 1056, 1, 0, 0, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1059,
|
||||
1, 0, 0, 0, 1059, 1060, 3, 163, 81, 0, 1060, 162, 1, 0, 0, 0, 1061, 1063,
|
||||
3, 135, 67, 0, 1062, 1061, 1, 0, 0, 0, 1063, 1064, 1, 0, 0, 0, 1064, 1062,
|
||||
1, 0, 0, 0, 1064, 1065, 1, 0, 0, 0, 1065, 164, 1, 0, 0, 0, 1066, 1068,
|
||||
3, 167, 83, 0, 1067, 1066, 1, 0, 0, 0, 1067, 1068, 1, 0, 0, 0, 1068, 1069,
|
||||
1, 0, 0, 0, 1069, 1070, 5, 46, 0, 0, 1070, 1075, 3, 167, 83, 0, 1071, 1072,
|
||||
3, 167, 83, 0, 1072, 1073, 5, 46, 0, 0, 1073, 1075, 1, 0, 0, 0, 1074, 1067,
|
||||
1, 0, 0, 0, 1074, 1071, 1, 0, 0, 0, 1075, 166, 1, 0, 0, 0, 1076, 1078,
|
||||
3, 149, 74, 0, 1077, 1076, 1, 0, 0, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1077,
|
||||
1, 0, 0, 0, 1079, 1080, 1, 0, 0, 0, 1080, 168, 1, 0, 0, 0, 1081, 1083,
|
||||
7, 13, 0, 0, 1082, 1084, 7, 12, 0, 0, 1083, 1082, 1, 0, 0, 0, 1083, 1084,
|
||||
1, 0, 0, 0, 1084, 1085, 1, 0, 0, 0, 1085, 1086, 3, 163, 81, 0, 1086, 170,
|
||||
1, 0, 0, 0, 1087, 1088, 5, 92, 0, 0, 1088, 1103, 7, 14, 0, 0, 1089, 1090,
|
||||
5, 92, 0, 0, 1090, 1092, 3, 147, 73, 0, 1091, 1093, 3, 147, 73, 0, 1092,
|
||||
1091, 1, 0, 0, 0, 1092, 1093, 1, 0, 0, 0, 1093, 1095, 1, 0, 0, 0, 1094,
|
||||
1096, 3, 147, 73, 0, 1095, 1094, 1, 0, 0, 0, 1095, 1096, 1, 0, 0, 0, 1096,
|
||||
1103, 1, 0, 0, 0, 1097, 1098, 5, 92, 0, 0, 1098, 1099, 5, 120, 0, 0, 1099,
|
||||
1100, 1, 0, 0, 0, 1100, 1103, 3, 167, 83, 0, 1101, 1103, 3, 153, 76, 0,
|
||||
1102, 1087, 1, 0, 0, 0, 1102, 1089, 1, 0, 0, 0, 1102, 1097, 1, 0, 0, 0,
|
||||
1102, 1101, 1, 0, 0, 0, 1103, 172, 1, 0, 0, 0, 1104, 1106, 7, 15, 0, 0,
|
||||
1105, 1104, 1, 0, 0, 0, 1106, 1107, 1, 0, 0, 0, 1107, 1105, 1, 0, 0, 0,
|
||||
1107, 1108, 1, 0, 0, 0, 1108, 1109, 1, 0, 0, 0, 1109, 1110, 6, 86, 0, 0,
|
||||
1110, 174, 1, 0, 0, 0, 1111, 1113, 5, 13, 0, 0, 1112, 1114, 5, 10, 0, 0,
|
||||
1113, 1112, 1, 0, 0, 0, 1113, 1114, 1, 0, 0, 0, 1114, 1117, 1, 0, 0, 0,
|
||||
1115, 1117, 5, 10, 0, 0, 1116, 1111, 1, 0, 0, 0, 1116, 1115, 1, 0, 0, 0,
|
||||
1117, 1118, 1, 0, 0, 0, 1118, 1119, 6, 87, 0, 0, 1119, 176, 1, 0, 0, 0,
|
||||
70, 0, 215, 229, 251, 277, 305, 323, 331, 366, 374, 390, 414, 425, 431,
|
||||
436, 438, 469, 505, 541, 571, 609, 647, 673, 693, 715, 739, 761, 785, 813,
|
||||
833, 855, 884, 890, 894, 899, 901, 911, 915, 920, 923, 927, 932, 938, 943,
|
||||
948, 953, 962, 971, 982, 988, 992, 998, 1026, 1030, 1035, 1041, 1046, 1053,
|
||||
1057, 1064, 1067, 1074, 1079, 1083, 1092, 1095, 1102, 1107, 1113, 1116,
|
||||
1, 6, 0, 0,
|
||||
}
|
||||
deserializer := antlr.NewATNDeserializer(nil)
|
||||
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
|
||||
@ -580,13 +675,21 @@ const (
|
||||
PlanLexerArrayContainsAll = 44
|
||||
PlanLexerArrayContainsAny = 45
|
||||
PlanLexerArrayLength = 46
|
||||
PlanLexerBooleanConstant = 47
|
||||
PlanLexerIntegerConstant = 48
|
||||
PlanLexerFloatingConstant = 49
|
||||
PlanLexerIdentifier = 50
|
||||
PlanLexerMeta = 51
|
||||
PlanLexerStringLiteral = 52
|
||||
PlanLexerJSONIdentifier = 53
|
||||
PlanLexerWhitespace = 54
|
||||
PlanLexerNewline = 55
|
||||
PlanLexerSTEuqals = 47
|
||||
PlanLexerSTTouches = 48
|
||||
PlanLexerSTOverlaps = 49
|
||||
PlanLexerSTCrosses = 50
|
||||
PlanLexerSTContains = 51
|
||||
PlanLexerSTIntersects = 52
|
||||
PlanLexerSTWithin = 53
|
||||
PlanLexerSTDWithin = 54
|
||||
PlanLexerBooleanConstant = 55
|
||||
PlanLexerIntegerConstant = 56
|
||||
PlanLexerFloatingConstant = 57
|
||||
PlanLexerIdentifier = 58
|
||||
PlanLexerMeta = 59
|
||||
PlanLexerStringLiteral = 60
|
||||
PlanLexerJSONIdentifier = 61
|
||||
PlanLexerWhitespace = 62
|
||||
PlanLexerNewline = 63
|
||||
)
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -37,6 +37,9 @@ type PlanVisitor interface {
|
||||
// Visit a parse tree produced by PlanParser#Identifier.
|
||||
VisitIdentifier(ctx *IdentifierContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by PlanParser#STIntersects.
|
||||
VisitSTIntersects(ctx *STIntersectsContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by PlanParser#Like.
|
||||
VisitLike(ctx *LikeContext) interface{}
|
||||
|
||||
@ -52,12 +55,18 @@ type PlanVisitor interface {
|
||||
// Visit a parse tree produced by PlanParser#Boolean.
|
||||
VisitBoolean(ctx *BooleanContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by PlanParser#STDWithin.
|
||||
VisitSTDWithin(ctx *STDWithinContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by PlanParser#Shift.
|
||||
VisitShift(ctx *ShiftContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by PlanParser#Call.
|
||||
VisitCall(ctx *CallContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by PlanParser#STCrosses.
|
||||
VisitSTCrosses(ctx *STCrossesContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by PlanParser#ReverseRange.
|
||||
VisitReverseRange(ctx *ReverseRangeContext) interface{}
|
||||
|
||||
@ -82,12 +91,21 @@ type PlanVisitor interface {
|
||||
// Visit a parse tree produced by PlanParser#TextMatch.
|
||||
VisitTextMatch(ctx *TextMatchContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by PlanParser#STTouches.
|
||||
VisitSTTouches(ctx *STTouchesContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by PlanParser#STContains.
|
||||
VisitSTContains(ctx *STContainsContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by PlanParser#Term.
|
||||
VisitTerm(ctx *TermContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by PlanParser#JSONContains.
|
||||
VisitJSONContains(ctx *JSONContainsContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by PlanParser#STWithin.
|
||||
VisitSTWithin(ctx *STWithinContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by PlanParser#Range.
|
||||
VisitRange(ctx *RangeContext) interface{}
|
||||
|
||||
@ -115,9 +133,15 @@ type PlanVisitor interface {
|
||||
// Visit a parse tree produced by PlanParser#BitAnd.
|
||||
VisitBitAnd(ctx *BitAndContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by PlanParser#STEuqals.
|
||||
VisitSTEuqals(ctx *STEuqalsContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by PlanParser#IsNull.
|
||||
VisitIsNull(ctx *IsNullContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by PlanParser#Power.
|
||||
VisitPower(ctx *PowerContext) interface{}
|
||||
|
||||
// Visit a parse tree produced by PlanParser#STOverlaps.
|
||||
VisitSTOverlaps(ctx *STOverlapsContext) interface{}
|
||||
}
|
||||
|
||||
@ -1619,6 +1619,307 @@ func (v *ParserVisitor) VisitTemplateVariable(ctx *parser.TemplateVariableContex
|
||||
}
|
||||
}
|
||||
|
||||
func (v *ParserVisitor) VisitSTEuqals(ctx *parser.STEuqalsContext) interface{} {
|
||||
childExpr, err := v.translateIdentifier(ctx.Identifier().GetText())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
columnInfo := toColumnInfo(childExpr)
|
||||
if columnInfo == nil ||
|
||||
(!typeutil.IsGeometryType(columnInfo.GetDataType())) {
|
||||
return fmt.Errorf(
|
||||
"STEuqals operation are only supported on geometry fields now, got: %s", ctx.GetText())
|
||||
}
|
||||
// Process the WKT string
|
||||
element := ctx.StringLiteral().GetText()
|
||||
wktString := element[1 : len(element)-1] // Remove surrounding quotes
|
||||
|
||||
if err := checkValidWKT(wktString); err != nil {
|
||||
return err
|
||||
}
|
||||
expr := &planpb.Expr{
|
||||
Expr: &planpb.Expr_GisfunctionFilterExpr{
|
||||
GisfunctionFilterExpr: &planpb.GISFunctionFilterExpr{
|
||||
ColumnInfo: columnInfo,
|
||||
WktString: wktString,
|
||||
Op: planpb.GISFunctionFilterExpr_Equals,
|
||||
},
|
||||
},
|
||||
}
|
||||
return &ExprWithType{
|
||||
expr: expr,
|
||||
dataType: schemapb.DataType_Bool,
|
||||
}
|
||||
}
|
||||
|
||||
func (v *ParserVisitor) VisitSTTouches(ctx *parser.STTouchesContext) interface{} {
|
||||
childExpr, err := v.translateIdentifier(ctx.Identifier().GetText())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
columnInfo := toColumnInfo(childExpr)
|
||||
if columnInfo == nil ||
|
||||
(!typeutil.IsGeometryType(columnInfo.GetDataType())) {
|
||||
return fmt.Errorf(
|
||||
"STTouches operation are only supported on geometry fields now, got: %s", ctx.GetText())
|
||||
}
|
||||
// Process the WKT string
|
||||
element := ctx.StringLiteral().GetText()
|
||||
wktString := element[1 : len(element)-1] // Remove surrounding quotes
|
||||
|
||||
if err := checkValidWKT(wktString); err != nil {
|
||||
return err
|
||||
}
|
||||
expr := &planpb.Expr{
|
||||
Expr: &planpb.Expr_GisfunctionFilterExpr{
|
||||
GisfunctionFilterExpr: &planpb.GISFunctionFilterExpr{
|
||||
ColumnInfo: columnInfo,
|
||||
WktString: wktString,
|
||||
Op: planpb.GISFunctionFilterExpr_Touches,
|
||||
},
|
||||
},
|
||||
}
|
||||
return &ExprWithType{
|
||||
expr: expr,
|
||||
dataType: schemapb.DataType_Bool,
|
||||
}
|
||||
}
|
||||
|
||||
func (v *ParserVisitor) VisitSTOverlaps(ctx *parser.STOverlapsContext) interface{} {
|
||||
childExpr, err := v.translateIdentifier(ctx.Identifier().GetText())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
columnInfo := toColumnInfo(childExpr)
|
||||
if columnInfo == nil ||
|
||||
(!typeutil.IsGeometryType(columnInfo.GetDataType())) {
|
||||
return fmt.Errorf(
|
||||
"STOverlaps operation are only supported on geometry fields now, got: %s", ctx.GetText())
|
||||
}
|
||||
// Process the WKT string
|
||||
element := ctx.StringLiteral().GetText()
|
||||
wktString := element[1 : len(element)-1] // Remove surrounding quotes
|
||||
|
||||
if err := checkValidWKT(wktString); err != nil {
|
||||
return err
|
||||
}
|
||||
expr := &planpb.Expr{
|
||||
Expr: &planpb.Expr_GisfunctionFilterExpr{
|
||||
GisfunctionFilterExpr: &planpb.GISFunctionFilterExpr{
|
||||
ColumnInfo: columnInfo,
|
||||
WktString: wktString,
|
||||
Op: planpb.GISFunctionFilterExpr_Overlaps,
|
||||
},
|
||||
},
|
||||
}
|
||||
return &ExprWithType{
|
||||
expr: expr,
|
||||
dataType: schemapb.DataType_Bool,
|
||||
}
|
||||
}
|
||||
|
||||
func (v *ParserVisitor) VisitSTCrosses(ctx *parser.STCrossesContext) interface{} {
|
||||
childExpr, err := v.translateIdentifier(ctx.Identifier().GetText())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
columnInfo := toColumnInfo(childExpr)
|
||||
if columnInfo == nil ||
|
||||
(!typeutil.IsGeometryType(columnInfo.GetDataType())) {
|
||||
return fmt.Errorf(
|
||||
"STCrosses operation are only supported on geometry fields now, got: %s", ctx.GetText())
|
||||
}
|
||||
// Process the WKT string
|
||||
element := ctx.StringLiteral().GetText()
|
||||
wktString := element[1 : len(element)-1] // Remove surrounding quotes
|
||||
|
||||
if err := checkValidWKT(wktString); err != nil {
|
||||
return err
|
||||
}
|
||||
expr := &planpb.Expr{
|
||||
Expr: &planpb.Expr_GisfunctionFilterExpr{
|
||||
GisfunctionFilterExpr: &planpb.GISFunctionFilterExpr{
|
||||
ColumnInfo: columnInfo,
|
||||
WktString: wktString,
|
||||
Op: planpb.GISFunctionFilterExpr_Crosses,
|
||||
},
|
||||
},
|
||||
}
|
||||
return &ExprWithType{
|
||||
expr: expr,
|
||||
dataType: schemapb.DataType_Bool,
|
||||
}
|
||||
}
|
||||
|
||||
func (v *ParserVisitor) VisitSTContains(ctx *parser.STContainsContext) interface{} {
|
||||
childExpr, err := v.translateIdentifier(ctx.Identifier().GetText())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
columnInfo := toColumnInfo(childExpr)
|
||||
if columnInfo == nil ||
|
||||
(!typeutil.IsGeometryType(columnInfo.GetDataType())) {
|
||||
return fmt.Errorf(
|
||||
"STContains operation are only supported on geometry fields now, got: %s", ctx.GetText())
|
||||
}
|
||||
// Process the WKT string
|
||||
element := ctx.StringLiteral().GetText()
|
||||
wktString := element[1 : len(element)-1] // Remove surrounding quotes
|
||||
if err := checkValidWKT(wktString); err != nil {
|
||||
return err
|
||||
}
|
||||
expr := &planpb.Expr{
|
||||
Expr: &planpb.Expr_GisfunctionFilterExpr{
|
||||
GisfunctionFilterExpr: &planpb.GISFunctionFilterExpr{
|
||||
ColumnInfo: columnInfo,
|
||||
WktString: wktString,
|
||||
Op: planpb.GISFunctionFilterExpr_Contains,
|
||||
},
|
||||
},
|
||||
}
|
||||
return &ExprWithType{
|
||||
expr: expr,
|
||||
dataType: schemapb.DataType_Bool,
|
||||
}
|
||||
}
|
||||
|
||||
func (v *ParserVisitor) VisitSTIntersects(ctx *parser.STIntersectsContext) interface{} {
|
||||
childExpr, err := v.translateIdentifier(ctx.Identifier().GetText())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
columnInfo := toColumnInfo(childExpr)
|
||||
if columnInfo == nil ||
|
||||
(!typeutil.IsGeometryType(columnInfo.GetDataType())) {
|
||||
return fmt.Errorf(
|
||||
"STIntersects operation are only supported on geometry fields now, got: %s", ctx.GetText())
|
||||
}
|
||||
// Process the WKT string
|
||||
element := ctx.StringLiteral().GetText()
|
||||
wktString := element[1 : len(element)-1] // Remove surrounding quotes
|
||||
|
||||
if err := checkValidWKT(wktString); err != nil {
|
||||
return err
|
||||
}
|
||||
expr := &planpb.Expr{
|
||||
Expr: &planpb.Expr_GisfunctionFilterExpr{
|
||||
GisfunctionFilterExpr: &planpb.GISFunctionFilterExpr{
|
||||
ColumnInfo: columnInfo,
|
||||
WktString: wktString,
|
||||
Op: planpb.GISFunctionFilterExpr_Intersects,
|
||||
},
|
||||
},
|
||||
}
|
||||
return &ExprWithType{
|
||||
expr: expr,
|
||||
dataType: schemapb.DataType_Bool,
|
||||
}
|
||||
}
|
||||
|
||||
func (v *ParserVisitor) VisitSTWithin(ctx *parser.STWithinContext) interface{} {
|
||||
childExpr, err := v.translateIdentifier(ctx.Identifier().GetText())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
columnInfo := toColumnInfo(childExpr)
|
||||
if columnInfo == nil ||
|
||||
(!typeutil.IsGeometryType(columnInfo.GetDataType())) {
|
||||
return fmt.Errorf(
|
||||
"STWithin operation are only supported on geometry fields now, got: %s", ctx.GetText())
|
||||
}
|
||||
// Process the WKT string
|
||||
element := ctx.StringLiteral().GetText()
|
||||
wktString := element[1 : len(element)-1] // Remove surrounding quotes
|
||||
|
||||
if err := checkValidWKT(wktString); err != nil {
|
||||
return err
|
||||
}
|
||||
expr := &planpb.Expr{
|
||||
Expr: &planpb.Expr_GisfunctionFilterExpr{
|
||||
GisfunctionFilterExpr: &planpb.GISFunctionFilterExpr{
|
||||
ColumnInfo: columnInfo,
|
||||
WktString: wktString,
|
||||
Op: planpb.GISFunctionFilterExpr_Within,
|
||||
},
|
||||
},
|
||||
}
|
||||
return &ExprWithType{
|
||||
expr: expr,
|
||||
dataType: schemapb.DataType_Bool,
|
||||
}
|
||||
}
|
||||
|
||||
func (v *ParserVisitor) VisitSTDWithin(ctx *parser.STDWithinContext) interface{} {
|
||||
// Process the geometry field identifier
|
||||
childExpr, err := v.translateIdentifier(ctx.Identifier().GetText())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
columnInfo := toColumnInfo(childExpr)
|
||||
if columnInfo == nil ||
|
||||
(!typeutil.IsGeometryType(columnInfo.GetDataType())) {
|
||||
return fmt.Errorf(
|
||||
"ST_DWITHIN operation are only supported on geometry fields now, got: %s", ctx.GetText())
|
||||
}
|
||||
|
||||
// Process the WKT string
|
||||
element := ctx.StringLiteral().GetText()
|
||||
wktString := element[1 : len(element)-1] // Remove surrounding quotes
|
||||
|
||||
if err = checkValidPoint(wktString); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Process the distance expression (can be int or float)
|
||||
distanceExpr := ctx.Expr().Accept(v)
|
||||
if err := getError(distanceExpr); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Extract distance value - must be a constant expression
|
||||
distanceValueExpr := getValueExpr(distanceExpr)
|
||||
if distanceValueExpr == nil {
|
||||
return fmt.Errorf("distance parameter must be a constant numeric value, got: %s", ctx.Expr().GetText())
|
||||
}
|
||||
|
||||
var distance float64
|
||||
genericValue := distanceValueExpr.GetValue()
|
||||
if genericValue == nil {
|
||||
return fmt.Errorf("invalid distance value: %s", ctx.Expr().GetText())
|
||||
}
|
||||
|
||||
// Handle both integer and floating point values using type assertion
|
||||
switch val := genericValue.GetVal().(type) {
|
||||
case *planpb.GenericValue_Int64Val:
|
||||
distance = float64(val.Int64Val)
|
||||
case *planpb.GenericValue_FloatVal:
|
||||
distance = val.FloatVal
|
||||
default:
|
||||
return fmt.Errorf("distance parameter must be a numeric value (int or float), got: %s", ctx.Expr().GetText())
|
||||
}
|
||||
|
||||
if distance < 0 {
|
||||
return fmt.Errorf("distance parameter must be non-negative, got: %f", distance)
|
||||
}
|
||||
|
||||
// Create the GIS function expression using the bounding box
|
||||
expr := &planpb.Expr{
|
||||
Expr: &planpb.Expr_GisfunctionFilterExpr{
|
||||
GisfunctionFilterExpr: &planpb.GISFunctionFilterExpr{
|
||||
ColumnInfo: columnInfo,
|
||||
WktString: wktString, // Use bounding box instead of original point
|
||||
Op: planpb.GISFunctionFilterExpr_DWithin,
|
||||
Distance: distance, // Keep distance for reference
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
return &ExprWithType{
|
||||
expr: expr,
|
||||
dataType: schemapb.DataType_Bool,
|
||||
}
|
||||
}
|
||||
|
||||
func (v *ParserVisitor) VisitTimestamptzCompare(ctx *parser.TimestamptzCompareContext) interface{} {
|
||||
colExpr, err := v.translateIdentifier(ctx.Identifier().GetText())
|
||||
identifier := ctx.Identifier().Accept(v)
|
||||
|
||||
@ -1873,3 +1873,338 @@ func Test_JSONPathNullExpr(t *testing.T) {
|
||||
assert.Equal(t, planStr, plan2Str)
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// GIS Functions Tests
|
||||
// ============================================================================
|
||||
|
||||
func TestExpr_GISFunctions(t *testing.T) {
|
||||
schema := newTestSchemaHelper(t)
|
||||
|
||||
// Test valid GIS function expressions
|
||||
validExprs := []string{
|
||||
// ST_EQUALS tests
|
||||
`st_equals(GeometryField, "POINT(0 0)")`,
|
||||
`ST_EQUALS(GeometryField, "POINT(1.5 2.3)")`,
|
||||
`st_equals(GeometryField, "POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")`,
|
||||
`st_equals(GeometryField, "LINESTRING(0 0, 1 1, 2 2)")`,
|
||||
`st_equals(GeometryField, "MULTIPOINT((0 0), (1 1))")`,
|
||||
|
||||
// ST_INTERSECTS tests
|
||||
`st_intersects(GeometryField, "POINT(0 0)")`,
|
||||
`ST_INTERSECTS(GeometryField, "POLYGON((0 0, 2 0, 2 2, 0 2, 0 0))")`,
|
||||
`st_intersects(GeometryField, "LINESTRING(-1 -1, 1 1)")`,
|
||||
|
||||
// ST_CONTAINS tests
|
||||
`st_contains(GeometryField, "POINT(0.5 0.5)")`,
|
||||
`ST_CONTAINS(GeometryField, "POLYGON((-1 -1, 1 -1, 1 1, -1 1, -1 -1))")`,
|
||||
|
||||
// ST_WITHIN tests
|
||||
`st_within(GeometryField, "POLYGON((-2 -2, 2 -2, 2 2, -2 2, -2 -2))")`,
|
||||
`ST_WITHIN(GeometryField, "POLYGON((0 0, 10 0, 10 10, 0 10, 0 0))")`,
|
||||
|
||||
// ST_TOUCHES tests
|
||||
`st_touches(GeometryField, "POINT(1 1)")`,
|
||||
`ST_TOUCHES(GeometryField, "LINESTRING(0 0, 1 0)")`,
|
||||
|
||||
// ST_OVERLAPS tests
|
||||
`st_overlaps(GeometryField, "POLYGON((0.5 0.5, 1.5 0.5, 1.5 1.5, 0.5 1.5, 0.5 0.5))")`,
|
||||
`ST_OVERLAPS(GeometryField, "POLYGON((-0.5 -0.5, 0.5 -0.5, 0.5 0.5, -0.5 0.5, -0.5 -0.5))")`,
|
||||
|
||||
// ST_CROSSES tests
|
||||
`st_crosses(GeometryField, "LINESTRING(-1 0, 1 0)")`,
|
||||
`ST_CROSSES(GeometryField, "LINESTRING(0 -1, 0 1)")`,
|
||||
|
||||
// ST_DWITHIN tests
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", 1.0)`,
|
||||
`ST_DWITHIN(GeometryField, "POINT(1 1)", 5)`,
|
||||
`st_dwithin(GeometryField, "POINT(2.5 3.7)", 10.5)`,
|
||||
`ST_DWITHIN(GeometryField, "POINT(0.5 0.5)", 2.0)`,
|
||||
`st_dwithin(GeometryField, "POINT(1.0 1.0)", 1)`,
|
||||
|
||||
// Case insensitive tests
|
||||
`St_Equals(GeometryField, "POINT(0 0)")`,
|
||||
`sT_iNtErSeCts(GeometryField, "POINT(1 1)")`,
|
||||
`St_DWithin(GeometryField, "POINT(0 0)", 5.0)`,
|
||||
}
|
||||
|
||||
for _, expr := range validExprs {
|
||||
assertValidExpr(t, schema, expr)
|
||||
}
|
||||
}
|
||||
|
||||
func TestExpr_GISFunctionsInvalidExpressions(t *testing.T) {
|
||||
schema := newTestSchemaHelper(t)
|
||||
|
||||
// Test invalid GIS function expressions
|
||||
invalidExprs := []string{
|
||||
// Invalid field type
|
||||
`st_equals(Int64Field, "POINT(0 0)")`,
|
||||
`st_intersects(StringField, "POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")`,
|
||||
`st_dwithin(BoolField, "POINT(0 0)", 1.0)`,
|
||||
|
||||
// Invalid WKT strings
|
||||
`st_equals(GeometryField, "INVALID WKT")`,
|
||||
`st_intersects(GeometryField, "POINT()")`,
|
||||
`st_contains(GeometryField, "POLYGON((0 0, 1 0))")`, // Unclosed polygon
|
||||
`st_within(GeometryField, "LINESTRING(0)")`, // Incomplete linestring
|
||||
|
||||
// Missing parameters
|
||||
`st_equals(GeometryField)`,
|
||||
`st_intersects()`,
|
||||
`st_dwithin(GeometryField, "POINT(0 0)")`, // Missing distance parameter
|
||||
`st_contains(GeometryField, "POINT(0 0)", 1.0)`, // Extra parameter
|
||||
|
||||
// Invalid distance parameter for ST_DWITHIN
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", "abc")`, // String parameter
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", "invalid")`,
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", -1.0)`, // Negative distance
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", true)`, // Boolean instead of number
|
||||
|
||||
// Non-existent fields
|
||||
`st_equals(NonExistentField, "POINT(0 0)")`,
|
||||
`st_dwithin(UnknownGeometryField, "POINT(0 0)", 5.0)`,
|
||||
}
|
||||
|
||||
for _, expr := range invalidExprs {
|
||||
assertInvalidExpr(t, schema, expr)
|
||||
}
|
||||
}
|
||||
|
||||
func TestExpr_GISFunctionsComplexExpressions(t *testing.T) {
|
||||
schema := newTestSchemaHelper(t)
|
||||
|
||||
// Test complex GIS expressions with logical operators
|
||||
complexExprs := []string{
|
||||
// AND combinations
|
||||
`st_equals(GeometryField, "POINT(0 0)") and st_intersects(GeometryField, "POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")`,
|
||||
`st_contains(GeometryField, "POINT(0.5 0.5)") AND st_within(GeometryField, "POLYGON((-1 -1, 1 -1, 1 1, -1 1, -1 -1))")`,
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", 5.0) and Int64Field > 100`,
|
||||
|
||||
// OR combinations
|
||||
`st_equals(GeometryField, "POINT(0 0)") or st_equals(GeometryField, "POINT(1 1)")`,
|
||||
`st_intersects(GeometryField, "POINT(0 0)") OR st_touches(GeometryField, "POINT(1 1)")`,
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", 1.0) or st_dwithin(GeometryField, "POINT(5 5)", 2.0)`,
|
||||
|
||||
// NOT combinations
|
||||
`not st_equals(GeometryField, "POINT(0 0)")`,
|
||||
`!(st_intersects(GeometryField, "POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))"))`,
|
||||
`not (st_dwithin(GeometryField, "POINT(0 0)", 1.0))`,
|
||||
|
||||
// Mixed with other field types
|
||||
`st_contains(GeometryField, "POINT(0 0)") and StringField == "test"`,
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", 5.0) or Int32Field in [1, 2, 3]`,
|
||||
`st_within(GeometryField, "POLYGON((0 0, 10 0, 10 10, 0 10, 0 0))") and FloatField > 0.5`,
|
||||
|
||||
// Nested expressions
|
||||
`(st_equals(GeometryField, "POINT(0 0)") and Int64Field > 0) or (st_intersects(GeometryField, "POINT(1 1)") and StringField != "")`,
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", 5.0) and (Int32Field > 10 or BoolField == true)`,
|
||||
}
|
||||
|
||||
for _, expr := range complexExprs {
|
||||
assertValidExpr(t, schema, expr)
|
||||
}
|
||||
}
|
||||
|
||||
func TestExpr_GISFunctionsWithDifferentGeometryTypes(t *testing.T) {
|
||||
schema := newTestSchemaHelper(t)
|
||||
|
||||
// Test different WKT geometry types
|
||||
geometryTests := []struct {
|
||||
gisFunc string
|
||||
geometryWKT string
|
||||
description string
|
||||
}{
|
||||
// Point geometries
|
||||
{"st_equals", "POINT(0 0)", "Simple point"},
|
||||
{"st_intersects", "POINT(1.5 2.3)", "Point with decimals"},
|
||||
{"st_dwithin", "POINT(-1 -1)", "Point with negative coordinates"},
|
||||
|
||||
// LineString geometries
|
||||
{"st_intersects", "LINESTRING(0 0, 1 1)", "Simple linestring"},
|
||||
{"st_crosses", "LINESTRING(-1 0, 1 0)", "Horizontal linestring"},
|
||||
{"st_contains", "LINESTRING(0 0, 1 1, 2 2)", "Multi-segment linestring"},
|
||||
|
||||
// Polygon geometries
|
||||
{"st_within", "POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))", "Simple polygon"},
|
||||
{"st_overlaps", "POLYGON((-1 -1, 1 -1, 1 1, -1 1, -1 -1))", "Centered polygon"},
|
||||
{"st_contains", "POLYGON((0 0, 2 0, 2 2, 0 2, 0 0), (0.5 0.5, 1.5 0.5, 1.5 1.5, 0.5 1.5, 0.5 0.5))", "Polygon with hole"},
|
||||
|
||||
// Multi geometries
|
||||
{"st_intersects", "MULTIPOINT((0 0), (1 1), (2 2))", "Multiple points"},
|
||||
{"st_crosses", "MULTILINESTRING((0 0, 1 0), (1 1, 2 1))", "Multiple linestrings"},
|
||||
{"st_overlaps", "MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)))", "Multiple polygons"},
|
||||
|
||||
// Collection geometries
|
||||
{"st_intersects", "GEOMETRYCOLLECTION(POINT(0 0), LINESTRING(1 1, 2 2))", "Mixed geometry collection"},
|
||||
}
|
||||
|
||||
for _, test := range geometryTests {
|
||||
exprStr := fmt.Sprintf(`%s(GeometryField, "%s")`, test.gisFunc, test.geometryWKT)
|
||||
if test.gisFunc == "st_dwithin" {
|
||||
exprStr = fmt.Sprintf(`%s(GeometryField, "%s", 5.0)`, test.gisFunc, test.geometryWKT)
|
||||
}
|
||||
|
||||
assertValidExpr(t, schema, exprStr)
|
||||
}
|
||||
}
|
||||
|
||||
func TestExpr_GISFunctionsWithVariousDistances(t *testing.T) {
|
||||
schema := newTestSchemaHelper(t)
|
||||
|
||||
// Test ST_DWITHIN with various distance values
|
||||
distanceTests := []struct {
|
||||
distance interface{}
|
||||
shouldPass bool
|
||||
description string
|
||||
}{
|
||||
// Valid distances (including zero)
|
||||
{0, true, "Zero distance (integer)"},
|
||||
{0.0, true, "Zero distance (float)"},
|
||||
{1, true, "Integer distance"},
|
||||
{1.0, true, "Float distance"},
|
||||
{0.5, true, "Small decimal distance"},
|
||||
{1000.0, true, "Large distance"},
|
||||
{99999999.999, true, "Very large distance"},
|
||||
{0.000001, true, "Very small distance"},
|
||||
|
||||
// Valid distance expressions as strings that should be parsed
|
||||
{"0", true, "String zero integer"},
|
||||
{"0.0", true, "String zero float"},
|
||||
{"1", true, "String integer"},
|
||||
{"1.5", true, "String float"},
|
||||
}
|
||||
|
||||
for _, test := range distanceTests {
|
||||
var exprStr string
|
||||
switch v := test.distance.(type) {
|
||||
case int:
|
||||
exprStr = fmt.Sprintf(`st_dwithin(GeometryField, "POINT(0 0)", %d)`, v)
|
||||
case float64:
|
||||
exprStr = fmt.Sprintf(`st_dwithin(GeometryField, "POINT(0 0)", %g)`, v)
|
||||
case string:
|
||||
exprStr = fmt.Sprintf(`st_dwithin(GeometryField, "POINT(0 0)", %s)`, v)
|
||||
}
|
||||
|
||||
if test.shouldPass {
|
||||
assertValidExpr(t, schema, exprStr)
|
||||
} else {
|
||||
assertInvalidExpr(t, schema, exprStr)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestExpr_GISFunctionsPlanGeneration(t *testing.T) {
|
||||
schema := newTestSchemaHelper(t)
|
||||
|
||||
// Test that GIS expressions can be used in search plans
|
||||
gisExprs := []string{
|
||||
`st_equals(GeometryField, "POINT(0 0)")`,
|
||||
`st_intersects(GeometryField, "POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")`,
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", 5.0)`,
|
||||
`st_contains(GeometryField, "POINT(0.5 0.5)") and Int64Field > 100`,
|
||||
`st_within(GeometryField, "POLYGON((-1 -1, 1 -1, 1 1, -1 1, -1 -1))") or StringField == "test"`,
|
||||
}
|
||||
|
||||
for _, expr := range gisExprs {
|
||||
plan, err := CreateSearchPlan(schema, expr, "FloatVectorField", &planpb.QueryInfo{
|
||||
Topk: 10,
|
||||
MetricType: "L2",
|
||||
SearchParams: "",
|
||||
RoundDecimal: 0,
|
||||
}, nil, nil)
|
||||
assert.NoError(t, err, "Failed to create plan for expression: %s", expr)
|
||||
assert.NotNil(t, plan, "Plan should not be nil for expression: %s", expr)
|
||||
assert.NotNil(t, plan.GetVectorAnns(), "Vector annotations should not be nil for expression: %s", expr)
|
||||
|
||||
if plan.GetVectorAnns().GetPredicates() != nil {
|
||||
// Verify that the plan contains GIS function filter expressions
|
||||
// This ensures that the GIS expressions are properly parsed and converted to plan nodes
|
||||
predicates := plan.GetVectorAnns().GetPredicates()
|
||||
assert.NotNil(t, predicates, "Predicates should not be nil for GIS expression: %s", expr)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestExpr_GISFunctionsWithJSONFields(t *testing.T) {
|
||||
schema := newTestSchemaHelper(t)
|
||||
|
||||
// Test invalid usage with JSON fields - GIS functions should only work with geometry fields
|
||||
invalidJSONGISExprs := []string{
|
||||
`st_equals(JSONField, "POINT(0 0)")`,
|
||||
`st_intersects($meta["geometry"], "POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")`,
|
||||
`st_dwithin(A, "POINT(0 0)", 5.0)`, // Dynamic field
|
||||
`st_contains(JSONField["geom"], "POINT(0.5 0.5)")`,
|
||||
}
|
||||
|
||||
for _, expr := range invalidJSONGISExprs {
|
||||
assertInvalidExpr(t, schema, expr)
|
||||
}
|
||||
}
|
||||
|
||||
func TestExpr_GISFunctionsZeroDistance(t *testing.T) {
|
||||
schema := newTestSchemaHelper(t)
|
||||
|
||||
// Test zero distance specifically for ST_DWITHIN
|
||||
zeroDistanceExprs := []string{
|
||||
// Integer zero
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", 0)`,
|
||||
// Float zero
|
||||
`st_dwithin(GeometryField, "POINT(1 1)", 0.0)`,
|
||||
// Zero in complex expressions
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", 0) and Int64Field > 10`,
|
||||
}
|
||||
|
||||
for _, expr := range zeroDistanceExprs {
|
||||
assertValidExpr(t, schema, expr)
|
||||
}
|
||||
|
||||
// Test that zero distance expressions can generate valid search plans
|
||||
for _, expr := range zeroDistanceExprs {
|
||||
plan, err := CreateSearchPlan(schema, expr, "FloatVectorField", &planpb.QueryInfo{
|
||||
Topk: 10,
|
||||
MetricType: "L2",
|
||||
SearchParams: "",
|
||||
RoundDecimal: 0,
|
||||
}, nil, nil)
|
||||
assert.NoError(t, err, "Failed to create plan for zero distance expression: %s", expr)
|
||||
assert.NotNil(t, plan, "Plan should not be nil for zero distance expression: %s", expr)
|
||||
}
|
||||
|
||||
// Test that negative distances are still invalid
|
||||
invalidNegativeExprs := []string{
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", -1)`,
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", -0.1)`,
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", -100.5)`,
|
||||
}
|
||||
|
||||
for _, expr := range invalidNegativeExprs {
|
||||
assertInvalidExpr(t, schema, expr)
|
||||
}
|
||||
}
|
||||
|
||||
func TestExpr_GISFunctionsInvalidParameterTypes(t *testing.T) {
|
||||
schema := newTestSchemaHelper(t)
|
||||
|
||||
// Test various invalid parameter types for ST_DWITHIN distance parameter
|
||||
invalidTypeExprs := []string{
|
||||
// String parameters (should be rejected)
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", "abc")`,
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", "123")`, // Numeric string
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", "123.45")`, // Float string
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", "0")`, // Zero string
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", "-5")`, // Negative string
|
||||
|
||||
// Boolean parameters (should be rejected)
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", true)`,
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", false)`,
|
||||
|
||||
// Array/complex parameters (should be rejected)
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", [1, 2, 3])`,
|
||||
`st_dwithin(GeometryField, "POINT(0 0)", GeometryField)`, // Field reference instead of literal
|
||||
}
|
||||
|
||||
for _, expr := range invalidTypeExprs {
|
||||
assertInvalidExpr(t, schema, expr)
|
||||
}
|
||||
}
|
||||
|
||||
@ -9,6 +9,8 @@ import (
|
||||
"unicode"
|
||||
|
||||
"github.com/cockroachdb/errors"
|
||||
"github.com/twpayne/go-geom"
|
||||
"github.com/twpayne/go-geom/encoding/wkt"
|
||||
|
||||
"github.com/milvus-io/milvus-proto/go-api/v2/schemapb"
|
||||
"github.com/milvus-io/milvus/internal/json"
|
||||
@ -166,6 +168,11 @@ func getTargetType(lDataType, rDataType schemapb.DataType) (schemapb.DataType, e
|
||||
return schemapb.DataType_Int64, nil
|
||||
}
|
||||
}
|
||||
if typeutil.IsGeometryType(lDataType) {
|
||||
if typeutil.IsGeometryType(rDataType) {
|
||||
return schemapb.DataType_Geometry, nil
|
||||
}
|
||||
}
|
||||
if typeutil.IsFloatingType(lDataType) {
|
||||
if typeutil.IsJSONType(rDataType) || typeutil.IsArithmetic(rDataType) {
|
||||
return schemapb.DataType_Double, nil
|
||||
@ -798,6 +805,22 @@ func decodeUnicode(input string) string {
|
||||
})
|
||||
}
|
||||
|
||||
func checkValidWKT(wktStr string) error {
|
||||
_, err := wkt.Unmarshal(wktStr)
|
||||
return err
|
||||
}
|
||||
|
||||
func checkValidPoint(wktStr string) error {
|
||||
g, err := wkt.Unmarshal(wktStr)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if g.(*geom.Point) == nil {
|
||||
return fmt.Errorf("only supports POINT geometry: %s", wktStr)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func parseISODuration(durationStr string) (*planpb.Interval, error) {
|
||||
iso8601DurationRegex := regexp.MustCompile(
|
||||
`^P` + // P at the start
|
||||
|
||||
@ -240,6 +240,8 @@ func (cit *createIndexTask) parseIndexParams(ctx context.Context) error {
|
||||
return getPrimitiveIndexType(cit.fieldSchema.ElementType), nil
|
||||
} else if typeutil.IsJSONType(dataType) {
|
||||
return Params.AutoIndexConfig.ScalarJSONIndexType.GetValue(), nil
|
||||
} else if typeutil.IsGeometryType(dataType) {
|
||||
return Params.AutoIndexConfig.ScalarGeometryIndexType.GetValue(), nil
|
||||
}
|
||||
return "", fmt.Errorf("create auto index on type:%s is not supported", dataType.String())
|
||||
}()
|
||||
@ -522,6 +524,7 @@ func checkTrain(ctx context.Context, field *schemapb.FieldSchema, indexParams ma
|
||||
indexParams[common.BitmapCardinalityLimitKey] = paramtable.Get().AutoIndexConfig.BitmapCardinalityLimit.GetValue()
|
||||
}
|
||||
}
|
||||
|
||||
checker, err := indexparamcheck.GetIndexCheckerMgrInstance().GetChecker(indexType)
|
||||
if err != nil {
|
||||
log.Ctx(ctx).Warn("Failed to get index checker", zap.String(common.IndexTypeKey, indexType))
|
||||
|
||||
@ -52,6 +52,7 @@ func TestInsertTask_CheckAligned(t *testing.T) {
|
||||
float16VectorFieldSchema := &schemapb.FieldSchema{DataType: schemapb.DataType_Float16Vector}
|
||||
bfloat16VectorFieldSchema := &schemapb.FieldSchema{DataType: schemapb.DataType_BFloat16Vector}
|
||||
varCharFieldSchema := &schemapb.FieldSchema{DataType: schemapb.DataType_VarChar}
|
||||
geometryFieldSchema := &schemapb.FieldSchema{DataType: schemapb.DataType_Geometry}
|
||||
|
||||
numRows := 20
|
||||
dim := 128
|
||||
@ -83,6 +84,7 @@ func TestInsertTask_CheckAligned(t *testing.T) {
|
||||
float16VectorFieldSchema,
|
||||
bfloat16VectorFieldSchema,
|
||||
varCharFieldSchema,
|
||||
geometryFieldSchema,
|
||||
},
|
||||
},
|
||||
}
|
||||
@ -102,6 +104,7 @@ func TestInsertTask_CheckAligned(t *testing.T) {
|
||||
newFloat16VectorFieldData("Float16Vector", numRows, dim),
|
||||
newBFloat16VectorFieldData("BFloat16Vector", numRows, dim),
|
||||
newScalarFieldData(varCharFieldSchema, "VarChar", numRows),
|
||||
newScalarFieldData(geometryFieldSchema, "Geometry", numRows),
|
||||
}
|
||||
err = case2.insertMsg.CheckAligned()
|
||||
assert.NoError(t, err)
|
||||
|
||||
@ -686,6 +686,14 @@ func (t *queryTask) PostExecute(ctx context.Context) error {
|
||||
log.Warn("fail to reduce query result", zap.Error(err))
|
||||
return err
|
||||
}
|
||||
for i, fieldData := range t.result.FieldsData {
|
||||
if fieldData.Type == schemapb.DataType_Geometry {
|
||||
if err := validateGeometryFieldSearchResult(&t.result.FieldsData[i]); err != nil {
|
||||
log.Warn("fail to validate geometry field search result", zap.Error(err))
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
t.result.OutputFields = t.userOutputFields
|
||||
if !t.reQuery {
|
||||
reconstructStructFieldDataForQuery(t.result, t.schema.CollectionSchema)
|
||||
|
||||
@ -840,6 +840,24 @@ func (t *searchTask) PostExecute(ctx context.Context) error {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fieldsData := t.result.GetResults().GetFieldsData()
|
||||
for i, fieldData := range fieldsData {
|
||||
if fieldData.Type == schemapb.DataType_Geometry {
|
||||
if err := validateGeometryFieldSearchResult(&fieldsData[i]); err != nil {
|
||||
log.Warn("fail to validate geometry field search result", zap.Error(err))
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
if t.result.GetResults().GetGroupByFieldValue() != nil &&
|
||||
t.result.GetResults().GetGroupByFieldValue().GetType() == schemapb.DataType_Geometry {
|
||||
if err := validateGeometryFieldSearchResult(&t.result.Results.GroupByFieldValue); err != nil {
|
||||
log.Warn("fail to validate geometry field search result", zap.Error(err))
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if t.isIterator && t.request.GetGuaranteeTimestamp() == 0 {
|
||||
// first page for iteration, need to set up sessionTs for iterator
|
||||
t.result.SessionTs = getMaxMvccTsFromChannels(t.queryChannelsTs, t.BeginTs())
|
||||
|
||||
@ -73,6 +73,7 @@ const (
|
||||
testFloat16VecField = "f16vec"
|
||||
testBFloat16VecField = "bf16vec"
|
||||
testStructArrayField = "structArray"
|
||||
testGeometryField = "geometry"
|
||||
testVecDim = 128
|
||||
testMaxVarCharLength = 100
|
||||
)
|
||||
@ -89,6 +90,7 @@ func genCollectionSchema(collectionName string) *schemapb.CollectionSchema {
|
||||
testFloat16VecField,
|
||||
testBFloat16VecField,
|
||||
testStructArrayField,
|
||||
testGeometryField,
|
||||
testVecDim,
|
||||
collectionName)
|
||||
}
|
||||
@ -237,6 +239,7 @@ func constructCollectionSchemaByDataType(collectionName string, fieldName2DataTy
|
||||
func constructCollectionSchemaWithAllType(
|
||||
boolField, int32Field, int64Field, floatField, doubleField string,
|
||||
floatVecField, binaryVecField, float16VecField, bfloat16VecField, structArrayField string,
|
||||
geometryField string,
|
||||
dim int,
|
||||
collectionName string,
|
||||
) *schemapb.CollectionSchema {
|
||||
@ -350,6 +353,16 @@ func constructCollectionSchemaWithAllType(
|
||||
IndexParams: nil,
|
||||
AutoID: false,
|
||||
}
|
||||
g := &schemapb.FieldSchema{
|
||||
FieldID: 0,
|
||||
Name: geometryField,
|
||||
IsPrimaryKey: false,
|
||||
Description: "",
|
||||
DataType: schemapb.DataType_Geometry,
|
||||
TypeParams: nil,
|
||||
IndexParams: nil,
|
||||
AutoID: false,
|
||||
}
|
||||
|
||||
// StructArrayField schema for testing
|
||||
structArrayFields := []*schemapb.StructArrayFieldSchema{
|
||||
@ -412,6 +425,7 @@ func constructCollectionSchemaWithAllType(
|
||||
bVec,
|
||||
f16Vec,
|
||||
bf16Vec,
|
||||
g,
|
||||
}
|
||||
} else {
|
||||
schema.Fields = []*schemapb.FieldSchema{
|
||||
@ -422,6 +436,7 @@ func constructCollectionSchemaWithAllType(
|
||||
d,
|
||||
fVec,
|
||||
// bVec,
|
||||
g,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -6,6 +6,9 @@ import (
|
||||
"reflect"
|
||||
|
||||
"github.com/samber/lo"
|
||||
"github.com/twpayne/go-geom/encoding/wkb"
|
||||
"github.com/twpayne/go-geom/encoding/wkbcommon"
|
||||
"github.com/twpayne/go-geom/encoding/wkt"
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/milvus-io/milvus-proto/go-api/v2/schemapb"
|
||||
@ -52,6 +55,55 @@ func withMaxCapCheck() validateOption {
|
||||
}
|
||||
}
|
||||
|
||||
func validateGeometryFieldSearchResult(fieldData **schemapb.FieldData) error {
|
||||
// Check if the field data already contains GeometryWktData
|
||||
_, ok := (*fieldData).GetScalars().Data.(*schemapb.ScalarField_GeometryWktData)
|
||||
if ok {
|
||||
// Already in WKT format, no conversion needed
|
||||
log.Debug("Geometry field data already contains WKT data, skipping conversion",
|
||||
zap.String("fieldName", (*fieldData).GetFieldName()))
|
||||
return nil
|
||||
}
|
||||
wkbArray := (*fieldData).GetScalars().GetGeometryData().GetData()
|
||||
wktArray := make([]string, len(wkbArray))
|
||||
validData := (*fieldData).GetValidData()
|
||||
for i, data := range wkbArray {
|
||||
if validData != nil && !validData[i] {
|
||||
continue
|
||||
}
|
||||
geomT, err := wkb.Unmarshal(data)
|
||||
if err != nil {
|
||||
log.Error("translate the wkb format search result into geometry failed")
|
||||
return err
|
||||
}
|
||||
// now remove MaxDecimalDigits limit
|
||||
wktStr, err := wkt.Marshal(geomT)
|
||||
if err != nil {
|
||||
log.Error("translate the geomery into its wkt failed")
|
||||
return err
|
||||
}
|
||||
wktArray[i] = wktStr
|
||||
}
|
||||
// modify the field data in place
|
||||
*fieldData = &schemapb.FieldData{
|
||||
Type: (*fieldData).GetType(),
|
||||
FieldName: (*fieldData).GetFieldName(),
|
||||
Field: &schemapb.FieldData_Scalars{
|
||||
Scalars: &schemapb.ScalarField{
|
||||
Data: &schemapb.ScalarField_GeometryWktData{
|
||||
GeometryWktData: &schemapb.GeometryWktArray{
|
||||
Data: wktArray,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
FieldId: (*fieldData).GetFieldId(),
|
||||
IsDynamic: (*fieldData).GetIsDynamic(),
|
||||
ValidData: (*fieldData).GetValidData(),
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (v *validateUtil) apply(opts ...validateOption) {
|
||||
for _, opt := range opts {
|
||||
opt(v)
|
||||
@ -101,6 +153,10 @@ func (v *validateUtil) Validate(data []*schemapb.FieldData, helper *typeutil.Sch
|
||||
if err := v.checkTextFieldData(field, fieldSchema); err != nil {
|
||||
return err
|
||||
}
|
||||
case schemapb.DataType_Geometry:
|
||||
if err := v.checkGeometryFieldData(field, fieldSchema); err != nil {
|
||||
return err
|
||||
}
|
||||
case schemapb.DataType_JSON:
|
||||
if err := v.checkJSONFieldData(field, fieldSchema); err != nil {
|
||||
return err
|
||||
@ -428,6 +484,13 @@ func FillWithNullValue(field *schemapb.FieldData, fieldSchema *schemapb.FieldSch
|
||||
return err
|
||||
}
|
||||
|
||||
case *schemapb.ScalarField_GeometryData:
|
||||
if fieldSchema.GetNullable() {
|
||||
sd.GeometryData.Data, err = fillWithNullValueImpl(sd.GeometryData.Data, field.GetValidData())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
default:
|
||||
return merr.WrapErrParameterInvalidMsg(fmt.Sprintf("undefined data type:%s", field.Type.String()))
|
||||
}
|
||||
@ -538,6 +601,27 @@ func FillWithDefaultValue(field *schemapb.FieldData, fieldSchema *schemapb.Field
|
||||
return err
|
||||
}
|
||||
|
||||
case *schemapb.ScalarField_GeometryData:
|
||||
if len(field.GetValidData()) != numRows {
|
||||
msg := fmt.Sprintf("the length of valid_data of field(%s) is wrong", field.GetFieldName())
|
||||
return merr.WrapErrParameterInvalid(numRows, len(field.GetValidData()), msg)
|
||||
}
|
||||
defaultValue := fieldSchema.GetDefaultValue().GetStringData()
|
||||
geomT, err := wkt.Unmarshal(defaultValue)
|
||||
if err != nil {
|
||||
log.Warn("invalid default value for geometry field", zap.Error(err))
|
||||
return merr.WrapErrParameterInvalidMsg("invalid default value for geometry field")
|
||||
}
|
||||
defaultValueWkbBytes, err := wkb.Marshal(geomT, wkb.NDR)
|
||||
if err != nil {
|
||||
log.Warn("invalid default value for geometry field", zap.Error(err))
|
||||
return merr.WrapErrParameterInvalidMsg("invalid default value for geometry field")
|
||||
}
|
||||
sd.GeometryData.Data, err = fillWithDefaultValueImpl(sd.GeometryData.Data, defaultValueWkbBytes, field.GetValidData())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
default:
|
||||
return merr.WrapErrParameterInvalidMsg(fmt.Sprintf("undefined data type:%s", field.Type.String()))
|
||||
}
|
||||
@ -733,9 +817,45 @@ func (v *validateUtil) checkTextFieldData(field *schemapb.FieldData, fieldSchema
|
||||
return merr.WrapErrParameterInvalidMsg("length of text field %s exceeds max length, row number: %d, length: %d, max length: %d",
|
||||
fieldSchema.GetName(), i, len(strArr[i]), maxLength)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// checkGeometryFieldData validates a client-supplied geometry column and
// converts it in place from WKT (client encoding) to WKB (internal storage
// encoding). A nil data array is only tolerated when the field is nullable
// or has a default value. On success *field is rewritten to carry a
// GeometryData (WKB) payload while keeping all other metadata.
func (v *validateUtil) checkGeometryFieldData(field *schemapb.FieldData, fieldSchema *schemapb.FieldSchema) error {
	geometryArray := field.GetScalars().GetGeometryWktData().GetData()
	wkbArray := make([][]byte, len(geometryArray))
	if geometryArray == nil && fieldSchema.GetDefaultValue() == nil && !fieldSchema.GetNullable() {
		msg := fmt.Sprintf("geometry field '%v' is illegal, array type mismatch", field.GetFieldName())
		return merr.WrapErrParameterInvalid("need geometry array", "got nil", msg)
	}

	// Parse each WKT row and re-encode it as WKB. This runs during insert-task
	// PreExecute, so downstream stages only ever see WKB-encoded geometry.
	for index, wktdata := range geometryArray {
		geomT, err := wkt.Unmarshal(wktdata)
		if err != nil {
			log.Warn("insert invalid Geometry data!! The wkt data has errors", zap.Error(err))
			return merr.WrapErrIoFailedReason(err.Error())
		}
		// Empty points are encoded as NaN coordinates so they survive the
		// WKB round trip.
		wkbArray[index], err = wkb.Marshal(geomT, wkb.NDR, wkbcommon.WKBOptionEmptyPointHandling(wkbcommon.EmptyPointHandlingNaN))
		if err != nil {
			log.Warn("insert invalid Geometry data!! Transform to wkb failed, has errors", zap.Error(err))
			return merr.WrapErrIoFailedReason(err.Error())
		}
	}
	// Replace the field data with the WKB array, preserving field id,
	// dynamic flag, and validity bitmap.
	*field = schemapb.FieldData{
		Type:      field.GetType(),
		FieldName: field.GetFieldName(),
		Field: &schemapb.FieldData_Scalars{
			Scalars: &schemapb.ScalarField{
				Data: &schemapb.ScalarField_GeometryData{GeometryData: &schemapb.GeometryArray{Data: wkbArray}},
			},
		},
		FieldId:   field.GetFieldId(),
		IsDynamic: field.GetIsDynamic(),
		ValidData: field.GetValidData(),
	}
	return nil
}
|
||||
|
||||
@ -1036,7 +1156,7 @@ func newValidateUtil(opts ...validateOption) *validateUtil {
|
||||
}
|
||||
|
||||
func ValidateAutoIndexMmapConfig(isVectorField bool, indexParams map[string]string) error {
|
||||
return common.ValidateAutoIndexMmapConfig(Params.AutoIndexConfig.Enable.GetAsBool(), isVectorField, indexParams)
|
||||
return common.ValidateAutoIndexMmapConfig(paramtable.Get().AutoIndexConfig.Enable.GetAsBool(), isVectorField, indexParams)
|
||||
}
|
||||
|
||||
func wasBm25FunctionInputField(coll *schemapb.CollectionSchema, field *schemapb.FieldSchema) bool {
|
||||
|
||||
@ -22,6 +22,8 @@ import (
|
||||
"strconv"
|
||||
|
||||
"github.com/cockroachdb/errors"
|
||||
"github.com/twpayne/go-geom/encoding/wkb"
|
||||
"github.com/twpayne/go-geom/encoding/wkt"
|
||||
"go.uber.org/zap"
|
||||
"google.golang.org/protobuf/proto"
|
||||
|
||||
@ -154,6 +156,21 @@ func (t *createCollectionTask) checkMaxCollectionsPerDB(ctx context.Context, db2
|
||||
return check(maxColNumPerDB)
|
||||
}
|
||||
|
||||
func checkGeometryDefaultValue(value string) error {
|
||||
geomT, err := wkt.Unmarshal(value)
|
||||
if err != nil {
|
||||
log.Warn("invalid default value for geometry field", zap.Error(err))
|
||||
return merr.WrapErrParameterInvalidMsg("invalid default value for geometry field")
|
||||
}
|
||||
_, err = wkb.Marshal(geomT, wkb.NDR)
|
||||
if err != nil {
|
||||
log.Warn("invalid default value for geometry field", zap.Error(err))
|
||||
return merr.WrapErrParameterInvalidMsg("invalid default value for geometry field")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func hasSystemFields(schema *schemapb.CollectionSchema, systemFields []string) bool {
|
||||
for _, f := range schema.GetFields() {
|
||||
if funcutil.SliceContain(systemFields, f.GetName()) {
|
||||
|
||||
@ -396,6 +396,9 @@ func checkFieldSchema(fieldSchemas []*schemapb.FieldSchema) error {
|
||||
msg := fmt.Sprintf("type not support default_value, type:%s, name:%s", fieldSchema.GetDataType().String(), fieldSchema.GetName())
|
||||
return merr.WrapErrParameterInvalidMsg(msg)
|
||||
}
|
||||
if dtype == schemapb.DataType_Geometry {
|
||||
return checkGeometryDefaultValue(fieldSchema.GetDefaultValue().GetStringData())
|
||||
}
|
||||
errTypeMismatch := func(fieldName, fieldType, defaultValueType string) error {
|
||||
msg := fmt.Sprintf("type (%s) of field (%s) is not equal to the type(%s) of default_value", fieldType, fieldName, defaultValueType)
|
||||
return merr.WrapErrParameterInvalidMsg(msg)
|
||||
|
||||
@ -437,6 +437,16 @@ func AddFieldDataToPayload(eventWriter *insertEventWriter, dataType schemapb.Dat
|
||||
return err
|
||||
}
|
||||
}
|
||||
case schemapb.DataType_Geometry:
|
||||
for i, singleGeometry := range singleData.(*GeometryFieldData).Data {
|
||||
isValid := true
|
||||
if len(singleData.(*GeometryFieldData).ValidData) != 0 {
|
||||
isValid = singleData.(*GeometryFieldData).ValidData[i]
|
||||
}
|
||||
if err = eventWriter.AddOneGeometryToPayload(singleGeometry, isValid); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
case schemapb.DataType_BinaryVector:
|
||||
if err = eventWriter.AddBinaryVectorToPayload(singleData.(*BinaryVectorFieldData).Data, singleData.(*BinaryVectorFieldData).Dim); err != nil {
|
||||
return err
|
||||
@ -712,6 +722,17 @@ func AddInsertData(dataType schemapb.DataType, data interface{}, insertData *Ins
|
||||
jsonFieldData.ValidData = append(jsonFieldData.ValidData, validData...)
|
||||
insertData.Data[fieldID] = jsonFieldData
|
||||
return len(singleData), nil
|
||||
case schemapb.DataType_Geometry:
|
||||
singleData := data.([][]byte)
|
||||
if fieldData == nil {
|
||||
fieldData = &GeometryFieldData{Data: make([][]byte, 0, rowNum)}
|
||||
}
|
||||
geometryFieldData := fieldData.(*GeometryFieldData)
|
||||
|
||||
geometryFieldData.Data = append(geometryFieldData.Data, singleData...)
|
||||
geometryFieldData.ValidData = append(geometryFieldData.ValidData, validData...)
|
||||
insertData.Data[fieldID] = geometryFieldData
|
||||
return len(singleData), nil
|
||||
|
||||
case schemapb.DataType_BinaryVector:
|
||||
singleData := data.([]byte)
|
||||
|
||||
@ -122,6 +122,9 @@ func (ds *DataSorter) Swap(i, j int) {
|
||||
case schemapb.DataType_JSON:
|
||||
data := singleData.(*JSONFieldData).Data
|
||||
data[i], data[j] = data[j], data[i]
|
||||
case schemapb.DataType_Geometry:
|
||||
data := singleData.(*GeometryFieldData).Data
|
||||
data[i], data[j] = data[j], data[i]
|
||||
case schemapb.DataType_SparseFloatVector:
|
||||
fieldData := singleData.(*SparseFloatVectorFieldData)
|
||||
fieldData.Contents[i], fieldData.Contents[j] = fieldData.Contents[j], fieldData.Contents[i]
|
||||
|
||||
@ -345,6 +345,16 @@ func NewFieldData(dataType schemapb.DataType, fieldSchema *schemapb.FieldSchema,
|
||||
data.ValidData = make([]bool, 0, cap)
|
||||
}
|
||||
return data, nil
|
||||
|
||||
case schemapb.DataType_Geometry:
|
||||
data := &GeometryFieldData{
|
||||
Data: make([][]byte, 0, cap),
|
||||
Nullable: fieldSchema.GetNullable(),
|
||||
}
|
||||
if fieldSchema.GetNullable() {
|
||||
data.ValidData = make([]bool, 0, cap)
|
||||
}
|
||||
return data, nil
|
||||
case schemapb.DataType_Array:
|
||||
data := &ArrayFieldData{
|
||||
Data: make([]*schemapb.ScalarField, 0, cap),
|
||||
@ -433,6 +443,11 @@ type TimestamptzFieldData struct {
|
||||
ValidData []bool
|
||||
Nullable bool
|
||||
}
|
||||
// GeometryFieldData holds one column of geometry values encoded as WKB.
type GeometryFieldData struct {
	Data      [][]byte // one WKB-encoded geometry per row
	ValidData []bool   // per-row validity flags; populated only when Nullable
	Nullable  bool     // whether the field schema allows null rows
}
|
||||
type BinaryVectorFieldData struct {
|
||||
Data []byte
|
||||
Dim int
|
||||
@ -487,6 +502,7 @@ func (data *TimestamptzFieldData) RowNum() int { return len(data.Data) }
|
||||
func (data *StringFieldData) RowNum() int { return len(data.Data) }
|
||||
func (data *ArrayFieldData) RowNum() int { return len(data.Data) }
|
||||
func (data *JSONFieldData) RowNum() int { return len(data.Data) }
|
||||
func (data *GeometryFieldData) RowNum() int { return len(data.Data) }
|
||||
func (data *BinaryVectorFieldData) RowNum() int { return len(data.Data) * 8 / data.Dim }
|
||||
func (data *FloatVectorFieldData) RowNum() int { return len(data.Data) / data.Dim }
|
||||
func (data *Float16VectorFieldData) RowNum() int { return len(data.Data) / 2 / data.Dim }
|
||||
@ -577,6 +593,13 @@ func (data *JSONFieldData) GetRow(i int) any {
|
||||
return data.Data[i]
|
||||
}
|
||||
|
||||
func (data *GeometryFieldData) GetRow(i int) any {
|
||||
if data.GetNullable() && !data.ValidData[i] {
|
||||
return nil
|
||||
}
|
||||
return data.Data[i]
|
||||
}
|
||||
|
||||
func (data *BinaryVectorFieldData) GetRow(i int) any {
|
||||
return data.Data[i*data.Dim/8 : (i+1)*data.Dim/8]
|
||||
}
|
||||
@ -616,6 +639,7 @@ func (data *TimestamptzFieldData) GetDataRows() any { return data.Data }
|
||||
func (data *StringFieldData) GetDataRows() any { return data.Data }
|
||||
func (data *ArrayFieldData) GetDataRows() any { return data.Data }
|
||||
func (data *JSONFieldData) GetDataRows() any { return data.Data }
|
||||
func (data *GeometryFieldData) GetDataRows() any { return data.Data }
|
||||
func (data *BinaryVectorFieldData) GetDataRows() any { return data.Data }
|
||||
func (data *FloatVectorFieldData) GetDataRows() any { return data.Data }
|
||||
func (data *Float16VectorFieldData) GetDataRows() any { return data.Data }
|
||||
@ -812,6 +836,23 @@ func (data *JSONFieldData) AppendRow(row interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (data *GeometryFieldData) AppendRow(row interface{}) error {
|
||||
if data.GetNullable() && row == nil {
|
||||
data.Data = append(data.Data, make([][]byte, 1)...)
|
||||
data.ValidData = append(data.ValidData, false)
|
||||
return nil
|
||||
}
|
||||
v, ok := row.([]byte)
|
||||
if !ok {
|
||||
return merr.WrapErrParameterInvalid("[]byte", row, "Wrong row type")
|
||||
}
|
||||
if data.GetNullable() {
|
||||
data.ValidData = append(data.ValidData, true)
|
||||
}
|
||||
data.Data = append(data.Data, v)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (data *BinaryVectorFieldData) AppendRow(row interface{}) error {
|
||||
v, ok := row.([]byte)
|
||||
if !ok || len(v) != data.Dim/8 {
|
||||
@ -970,6 +1011,14 @@ func (data *JSONFieldData) AppendRows(dataRows interface{}, validDataRows interf
|
||||
return data.AppendValidDataRows(validDataRows)
|
||||
}
|
||||
|
||||
func (data *GeometryFieldData) AppendRows(dataRows interface{}, validDataRows interface{}) error {
|
||||
err := data.AppendDataRows(dataRows)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return data.AppendValidDataRows(validDataRows)
|
||||
}
|
||||
|
||||
// AppendDataRows appends FLATTEN vectors to field data.
|
||||
func (data *BinaryVectorFieldData) AppendRows(dataRows interface{}, validDataRows interface{}) error {
|
||||
err := data.AppendDataRows(dataRows)
|
||||
@ -1129,6 +1178,15 @@ func (data *JSONFieldData) AppendDataRows(rows interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (data *GeometryFieldData) AppendDataRows(rows interface{}) error {
|
||||
v, ok := rows.([][]byte)
|
||||
if !ok {
|
||||
return merr.WrapErrParameterInvalid("[][]byte", rows, "Wrong rows type")
|
||||
}
|
||||
data.Data = append(data.Data, v...)
|
||||
return nil
|
||||
}
|
||||
|
||||
// AppendDataRows appends FLATTEN vectors to field data.
|
||||
func (data *BinaryVectorFieldData) AppendDataRows(rows interface{}) error {
|
||||
v, ok := rows.([]byte)
|
||||
@ -1346,6 +1404,18 @@ func (data *JSONFieldData) AppendValidDataRows(rows interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (data *GeometryFieldData) AppendValidDataRows(rows interface{}) error {
|
||||
if rows == nil {
|
||||
return nil
|
||||
}
|
||||
v, ok := rows.([]bool)
|
||||
if !ok {
|
||||
return merr.WrapErrParameterInvalid("[]bool", rows, "Wrong rows type")
|
||||
}
|
||||
data.ValidData = append(data.ValidData, v...)
|
||||
return nil
|
||||
}
|
||||
|
||||
// AppendValidDataRows appends FLATTEN vectors to field data.
|
||||
func (data *BinaryVectorFieldData) AppendValidDataRows(rows interface{}) error {
|
||||
if rows != nil {
|
||||
@ -1529,6 +1599,10 @@ func (data *TimestamptzFieldData) GetDataType() schemapb.DataType {
|
||||
func (data *StringFieldData) GetDataType() schemapb.DataType { return data.DataType }
|
||||
func (data *ArrayFieldData) GetDataType() schemapb.DataType { return schemapb.DataType_Array }
|
||||
func (data *JSONFieldData) GetDataType() schemapb.DataType { return schemapb.DataType_JSON }
|
||||
// GetDataType returns the schema data type of this column (always Geometry).
func (data *GeometryFieldData) GetDataType() schemapb.DataType {
	return schemapb.DataType_Geometry
}
|
||||
|
||||
func (data *BinaryVectorFieldData) GetDataType() schemapb.DataType {
|
||||
return schemapb.DataType_BinaryVector
|
||||
}
|
||||
@ -1602,6 +1676,15 @@ func (data *JSONFieldData) GetMemorySize() int {
|
||||
return size + binary.Size(data.ValidData) + binary.Size(data.Nullable)
|
||||
}
|
||||
|
||||
// GetMemorySize estimates the in-memory footprint of the geometry field data
// in bytes: the WKB payload of every row plus a fixed 16-byte per-row
// overhead (the same per-row constant used by the String/JSON field sizing
// and GetRowSize), plus the size of the validity and nullable bookkeeping.
func (data *GeometryFieldData) GetMemorySize() int {
	var size int
	// 16 is the assumed per-row bookkeeping overhead (slice header etc.),
	// kept consistent with String/JSON GetMemorySize and GetRowSize.
	for _, val := range data.Data {
		size += len(val) + 16
	}
	return size + binary.Size(data.ValidData) + binary.Size(data.Nullable)
}
|
||||
|
||||
func (data *BoolFieldData) GetRowSize(i int) int { return 1 }
|
||||
func (data *Int8FieldData) GetRowSize(i int) int { return 1 }
|
||||
func (data *Int16FieldData) GetRowSize(i int) int { return 2 }
|
||||
@ -1617,6 +1700,7 @@ func (data *BFloat16VectorFieldData) GetRowSize(i int) int { return data.Dim * 2
|
||||
// GetRowSize returns the estimated in-memory size of row i in bytes.
// Variable-length types add a fixed 16-byte per-row overhead on top of the
// raw payload length, matching the accounting in GetMemorySize.
func (data *Int8VectorFieldData) GetRowSize(i int) int { return data.Dim }
func (data *StringFieldData) GetRowSize(i int) int     { return len(data.Data[i]) + 16 }
func (data *JSONFieldData) GetRowSize(i int) int       { return len(data.Data[i]) + 16 }
func (data *GeometryFieldData) GetRowSize(i int) int   { return len(data.Data[i]) + 16 }
|
||||
func (data *ArrayFieldData) GetRowSize(i int) int {
|
||||
switch data.ElementType {
|
||||
case schemapb.DataType_Bool:
|
||||
@ -1718,3 +1802,7 @@ func (data *JSONFieldData) GetNullable() bool {
|
||||
// GetNullable reports whether the field supports null values.
// Vector array fields never do.
func (data *VectorArrayFieldData) GetNullable() bool {
	return false
}

// GetNullable reports whether this geometry field was declared nullable.
func (data *GeometryFieldData) GetNullable() bool {
	return data.Nullable
}
|
||||
|
||||
@ -39,6 +39,7 @@ type PayloadWriterInterface interface {
|
||||
AddOneStringToPayload(string, bool) error
|
||||
AddOneArrayToPayload(*schemapb.ScalarField, bool) error
|
||||
AddOneJSONToPayload([]byte, bool) error
|
||||
AddOneGeometryToPayload(msg []byte, isValid bool) error
|
||||
AddBinaryVectorToPayload([]byte, int) error
|
||||
AddFloatVectorToPayload([]float32, int) error
|
||||
AddFloat16VectorToPayload([]byte, int) error
|
||||
@ -70,6 +71,7 @@ type PayloadReaderInterface interface {
|
||||
GetArrayFromPayload() ([]*schemapb.ScalarField, []bool, error)
|
||||
GetVectorArrayFromPayload() ([]*schemapb.VectorField, error)
|
||||
GetJSONFromPayload() ([][]byte, []bool, error)
|
||||
GetGeometryFromPayload() ([][]byte, []bool, error)
|
||||
GetBinaryVectorFromPayload() ([]byte, int, error)
|
||||
GetFloat16VectorFromPayload() ([]byte, int, error)
|
||||
GetBFloat16VectorFromPayload() ([]byte, int, error)
|
||||
|
||||
@ -178,6 +178,9 @@ func (r *PayloadReader) GetDataFromPayload() (interface{}, []bool, int, error) {
|
||||
case schemapb.DataType_JSON:
|
||||
val, validData, err := r.GetJSONFromPayload()
|
||||
return val, validData, 0, err
|
||||
case schemapb.DataType_Geometry:
|
||||
val, validData, err := r.GetGeometryFromPayload()
|
||||
return val, validData, 0, err
|
||||
default:
|
||||
return nil, nil, 0, merr.WrapErrParameterInvalidMsg("unknown type")
|
||||
}
|
||||
@ -551,6 +554,25 @@ func (r *PayloadReader) GetJSONFromPayload() ([][]byte, []bool, error) {
|
||||
return value, nil, nil
|
||||
}
|
||||
|
||||
func (r *PayloadReader) GetGeometryFromPayload() ([][]byte, []bool, error) {
|
||||
if r.colType != schemapb.DataType_Geometry {
|
||||
return nil, nil, merr.WrapErrParameterInvalidMsg(fmt.Sprintf("failed to get Geometry from datatype %v", r.colType.String()))
|
||||
}
|
||||
|
||||
if r.nullable {
|
||||
return readNullableByteAndConvert(r, func(bytes []byte) []byte {
|
||||
return bytes
|
||||
})
|
||||
}
|
||||
value, err := readByteAndConvert(r, func(bytes parquet.ByteArray) []byte {
|
||||
return bytes
|
||||
})
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
return value, nil, nil
|
||||
}
|
||||
|
||||
func (r *PayloadReader) GetByteArrayDataSet() (*DataSet[parquet.ByteArray, *file.ByteArrayColumnChunkReader], error) {
|
||||
if r.colType != schemapb.DataType_String && r.colType != schemapb.DataType_VarChar {
|
||||
return nil, fmt.Errorf("failed to get string from datatype %v", r.colType.String())
|
||||
|
||||
@ -234,6 +234,25 @@ func (w *NativePayloadWriter) AddDataToPayloadForUT(data interface{}, validData
|
||||
isValid = validData[0]
|
||||
}
|
||||
return w.AddOneJSONToPayload(val, isValid)
|
||||
case schemapb.DataType_Geometry:
|
||||
val, ok := data.([]byte)
|
||||
if !ok {
|
||||
return merr.WrapErrParameterInvalidMsg("incorrect data type")
|
||||
}
|
||||
isValid := true
|
||||
if len(validData) > 1 {
|
||||
return merr.WrapErrParameterInvalidMsg("wrong input length when add data to payload")
|
||||
}
|
||||
if len(validData) == 0 && w.nullable {
|
||||
return merr.WrapErrParameterInvalidMsg("need pass valid_data when nullable==true")
|
||||
}
|
||||
if len(validData) == 1 {
|
||||
if !w.nullable {
|
||||
return merr.WrapErrParameterInvalidMsg("no need pass valid_data when nullable==false")
|
||||
}
|
||||
isValid = validData[0]
|
||||
}
|
||||
return w.AddOneGeometryToPayload(val, isValid)
|
||||
case schemapb.DataType_BinaryVector:
|
||||
val, ok := data.([]byte)
|
||||
if !ok {
|
||||
@ -614,6 +633,29 @@ func (w *NativePayloadWriter) AddOneJSONToPayload(data []byte, isValid bool) err
|
||||
return nil
|
||||
}
|
||||
|
||||
func (w *NativePayloadWriter) AddOneGeometryToPayload(data []byte, isValid bool) error {
|
||||
if w.finished {
|
||||
return errors.New("can't append data to finished geometry payload")
|
||||
}
|
||||
|
||||
if !w.nullable && !isValid {
|
||||
return merr.WrapErrParameterInvalidMsg("not support null when nullable is false")
|
||||
}
|
||||
|
||||
builder, ok := w.builder.(*array.BinaryBuilder)
|
||||
if !ok {
|
||||
return errors.New("failed to cast geometryBuilder")
|
||||
}
|
||||
|
||||
if !isValid {
|
||||
builder.AppendNull()
|
||||
} else {
|
||||
builder.Append(data)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (w *NativePayloadWriter) AddBinaryVectorToPayload(data []byte, dim int) error {
|
||||
if w.finished {
|
||||
return errors.New("can't append data to finished binary vector payload")
|
||||
@ -869,6 +911,8 @@ func MilvusDataTypeToArrowType(dataType schemapb.DataType, dim int) arrow.DataTy
|
||||
return &arrow.BinaryType{}
|
||||
case schemapb.DataType_JSON:
|
||||
return &arrow.BinaryType{}
|
||||
case schemapb.DataType_Geometry:
|
||||
return &arrow.BinaryType{}
|
||||
case schemapb.DataType_FloatVector:
|
||||
return &arrow.FixedSizeBinaryType{
|
||||
ByteWidth: dim * 4,
|
||||
|
||||
@ -21,6 +21,8 @@ import (
|
||||
"os"
|
||||
|
||||
"github.com/cockroachdb/errors"
|
||||
"github.com/twpayne/go-geom/encoding/wkb"
|
||||
"github.com/twpayne/go-geom/encoding/wkt"
|
||||
"golang.org/x/exp/mmap"
|
||||
"google.golang.org/protobuf/proto"
|
||||
|
||||
@ -376,6 +378,24 @@ func printPayloadValues(colType schemapb.DataType, reader PayloadReaderInterface
|
||||
for i, v := range valids {
|
||||
fmt.Printf("\t\t%d : %v\n", i, v)
|
||||
}
|
||||
// print the wkb bytes
|
||||
case schemapb.DataType_Geometry:
|
||||
rows, err := reader.GetPayloadLengthFromReader()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
val, valids, err := reader.GetGeometryFromPayload()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for i := 0; i < rows; i++ {
|
||||
geomT, _ := wkb.Unmarshal(val[i])
|
||||
wktStr, _ := wkt.Marshal(geomT)
|
||||
fmt.Printf("\t\t%d : %s\n", i, wktStr)
|
||||
}
|
||||
for i, v := range valids {
|
||||
fmt.Printf("\t\t%d : %v\n", i, v)
|
||||
}
|
||||
case schemapb.DataType_SparseFloatVector:
|
||||
sparseData, _, err := reader.GetSparseFloatVectorFromPayload()
|
||||
if err != nil {
|
||||
|
||||
@ -208,6 +208,13 @@ func TestPrintBinlogFiles(t *testing.T) {
|
||||
{Key: common.DimKey, Value: "4"},
|
||||
},
|
||||
},
|
||||
{
|
||||
FieldID: 113,
|
||||
Name: "field_geometry",
|
||||
IsPrimaryKey: false,
|
||||
Description: "description_15",
|
||||
DataType: schemapb.DataType_Geometry,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
@ -266,6 +273,13 @@ func TestPrintBinlogFiles(t *testing.T) {
|
||||
Data: []byte("12345678"),
|
||||
Dim: 4,
|
||||
},
|
||||
113: &GeometryFieldData{
|
||||
Data: [][]byte{
|
||||
// POINT (30.123 -10.456) and LINESTRING (30.123 -10.456, 10.789 30.123, -40.567 40.890)
|
||||
{0x01, 0x01, 0x00, 0x00, 0x00, 0xD2, 0x4A, 0x4D, 0x6A, 0x8B, 0x3C, 0x5C, 0x0A, 0x0D, 0x1B, 0x4F, 0x4F, 0x9A, 0x3D, 0x40},
|
||||
{0x01, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xD2, 0x4A, 0x4D, 0x6A, 0x8B, 0x3C, 0x5C, 0x0A, 0x0D, 0x1B, 0x4F, 0x4F, 0x9A, 0x3D, 0x40, 0x03, 0xA6, 0xB4, 0xA6, 0xA4, 0xD2, 0xC5, 0xC0, 0xD2, 0x4A, 0x4D, 0x6A, 0x8B, 0x3C, 0x5C, 0x0A},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
@ -323,6 +337,13 @@ func TestPrintBinlogFiles(t *testing.T) {
|
||||
Data: []byte("abcdefgh"),
|
||||
Dim: 4,
|
||||
},
|
||||
113: &GeometryFieldData{
|
||||
Data: [][]byte{
|
||||
// POINT (30.123 -10.456) and LINESTRING (30.123 -10.456, 10.789 30.123, -40.567 40.890)
|
||||
{0x01, 0x01, 0x00, 0x00, 0x00, 0xD2, 0x4A, 0x4D, 0x6A, 0x8B, 0x3C, 0x5C, 0x0A, 0x0D, 0x1B, 0x4F, 0x4F, 0x9A, 0x3D, 0x40},
|
||||
{0x01, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xD2, 0x4A, 0x4D, 0x6A, 0x8B, 0x3C, 0x5C, 0x0A, 0x0D, 0x1B, 0x4F, 0x4F, 0x9A, 0x3D, 0x40, 0x03, 0xA6, 0xB4, 0xA6, 0xA4, 0xD2, 0xC5, 0xC0, 0xD2, 0x4A, 0x4D, 0x6A, 0x8B, 0x3C, 0x5C, 0x0A},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
firstBlobs, err := insertCodec.Serialize(1, 1, insertDataFirst)
|
||||
|
||||
@ -444,6 +444,7 @@ var serdeMap = func() map[schemapb.DataType]serdeEntry {
|
||||
|
||||
m[schemapb.DataType_Array] = eagerArrayEntry
|
||||
m[schemapb.DataType_JSON] = byteEntry
|
||||
m[schemapb.DataType_Geometry] = byteEntry
|
||||
|
||||
// ArrayOfVector now implements the standard interface with elementType parameter
|
||||
m[schemapb.DataType_ArrayOfVector] = serdeEntry{
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user