Rename query node package, implement watchDmChannel

Signed-off-by: bigsheeper <yihao.dai@zilliz.com>

parent 44c75bdd2a
commit f02bd8c8f5
@@ -10,16 +10,16 @@ import (

 	"go.uber.org/zap"

-	querynodeimp "github.com/zilliztech/milvus-distributed/internal/querynode"
+	"github.com/zilliztech/milvus-distributed/internal/querynode"
 )

 func main() {

-	querynodeimp.Init()
-	fmt.Println("QueryNodeID is", querynodeimp.Params.QueryNodeID)
+	querynode.Init()
+	fmt.Println("QueryNodeID is", querynode.Params.QueryNodeID)
 	// Creates server.
 	ctx, cancel := context.WithCancel(context.Background())
-	svr := querynodeimp.NewQueryNode(ctx, 0)
+	svr := querynode.NewQueryNode(ctx, 0)

 	sc := make(chan os.Signal, 1)
 	signal.Notify(sc,

@@ -17,7 +17,7 @@ import (
 	"github.com/zilliztech/milvus-distributed/internal/indexnode"
 	"github.com/zilliztech/milvus-distributed/internal/master"
 	"github.com/zilliztech/milvus-distributed/internal/proxynode"
-	querynodeimp "github.com/zilliztech/milvus-distributed/internal/querynode"
+	"github.com/zilliztech/milvus-distributed/internal/querynode"
 	"github.com/zilliztech/milvus-distributed/internal/writenode"
 )

@@ -101,11 +101,11 @@ func InitProxy(wg *sync.WaitGroup) {

 func InitQueryNode(wg *sync.WaitGroup) {
 	defer wg.Done()
-	querynodeimp.Init()
-	fmt.Println("QueryNodeID is", querynodeimp.Params.QueryNodeID)
+	querynode.Init()
+	fmt.Println("QueryNodeID is", querynode.Params.QueryNodeID)
 	// Creates server.
 	ctx, cancel := context.WithCancel(context.Background())
-	svr := querynodeimp.NewQueryNode(ctx, 0)
+	svr := querynode.NewQueryNode(ctx, 0)

 	sc := make(chan os.Signal, 1)
 	signal.Notify(sc,
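Note on the two entry points above: both now initialize the renamed querynode package, create the node, and block on OS signals before shutting down. A minimal, self-contained Go sketch of that startup/shutdown pattern follows; the exact signal list and the server start/stop calls are cut off by the hunk boundary, so those parts are assumptions, not taken from this diff.

package main

import (
	"context"
	"fmt"
	"os"
	"os/signal"
	"syscall"

	"github.com/zilliztech/milvus-distributed/internal/querynode"
)

func main() {
	querynode.Init()
	fmt.Println("QueryNodeID is", querynode.Params.QueryNodeID)

	// Creates server.
	ctx, cancel := context.WithCancel(context.Background())
	svr := querynode.NewQueryNode(ctx, 0)
	_ = svr // start/stop wiring is outside the hunk shown above

	// Block until an OS signal arrives, then cancel the node's context.
	sc := make(chan os.Signal, 1)
	signal.Notify(sc, syscall.SIGHUP, syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT) // assumed signal set
	<-sc
	cancel()
}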
@@ -1,4 +1,3 @@
-# TODO
 set(MILVUS_QUERY_SRCS
     deprecated/BinaryQuery.cpp
     generated/PlanNode.cpp

@@ -10,7 +9,7 @@ set(MILVUS_QUERY_SRCS
     visitors/VerifyPlanNodeVisitor.cpp
     visitors/VerifyExprVisitor.cpp
     Plan.cpp
-    Search.cpp
+    SearchOnGrowing.cpp
     SearchOnSealed.cpp
     SearchOnIndex.cpp
     SearchBruteForce.cpp

@@ -40,7 +40,6 @@ struct UnaryExpr : Expr {
     ExprPtr child_;
 };

-// TODO: not enabled in sprint 1
 struct BoolUnaryExpr : UnaryExpr {
     enum class OpType { LogicalNot };
     OpType op_type_;

@@ -50,7 +49,6 @@ struct BoolUnaryExpr : UnaryExpr {
     accept(ExprVisitor&) override;
 };

-// TODO: not enabled in sprint 1
 struct BoolBinaryExpr : BinaryExpr {
     // Note: bitA - bitB == bitA & ~bitB, alias to LogicalMinus
     enum class OpType { LogicalAnd, LogicalOr, LogicalXor, LogicalMinus };

@@ -187,7 +187,6 @@ Parser::ParseTermNode(const Json& out_body) {
 std::unique_ptr<VectorPlanNode>
 Parser::ParseVecNode(const Json& out_body) {
     Assert(out_body.is_object());
-    // TODO add binary info
     Assert(out_body.size() == 1);
     auto iter = out_body.begin();
     auto field_name = FieldName(iter.key());
@@ -9,7 +9,7 @@
 // is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 // or implied. See the License for the specific language governing permissions and limitations under the License

-#include "Search.h"
+#include "SearchOnGrowing.h"
 #include <knowhere/index/vector_index/adapter/VectorAdapter.h>
 #include <knowhere/index/vector_index/VecIndexFactory.h>
 #include "segcore/Reduce.h"

@@ -65,7 +65,6 @@ FloatSearch(const segcore::SegmentGrowingImpl& segment,
     auto topK = info.topK_;
     auto total_count = topK * num_queries;
     auto metric_type = GetMetricType(info.metric_type_);
-    // TODO: optimize

     // step 3: small indexing search
     // std::vector<int64_t> final_uids(total_count, -1);

@@ -77,10 +76,9 @@ FloatSearch(const segcore::SegmentGrowingImpl& segment,
     const auto& indexing_entry = indexing_record.get_vec_entry(vecfield_offset);
     auto search_conf = indexing_entry.get_search_conf(topK);

-    // TODO: use sub_qr
     for (int chunk_id = 0; chunk_id < max_indexed_id; ++chunk_id) {
         auto chunk_size = indexing_entry.get_chunk_size();
-        auto indexing = indexing_entry.get_vec_indexing(chunk_id);
+        auto indexing = indexing_entry.get_indexing(chunk_id);

         auto sub_view = BitsetSubView(bitset, chunk_id * chunk_size, chunk_size);
         auto sub_qr = SearchOnIndex(query_dataset, *indexing, search_conf, sub_view);

@@ -197,4 +195,38 @@ BinarySearch(const segcore::SegmentGrowingImpl& segment,
     return Status::OK();
 }

+// TODO: refactor and merge this into one
+template <typename VectorType>
+void
+SearchOnGrowing(const segcore::SegmentGrowingImpl& segment,
+                const query::QueryInfo& info,
+                const EmbeddedType<VectorType>* query_data,
+                int64_t num_queries,
+                Timestamp timestamp,
+                const faiss::BitsetView& bitset,
+                QueryResult& results) {
+    static_assert(IsVector<VectorType>);
+    if constexpr (std::is_same_v<VectorType, FloatVector>) {
+        FloatSearch(segment, info, query_data, num_queries, timestamp, bitset, results);
+    } else {
+        BinarySearch(segment, info, query_data, num_queries, timestamp, bitset, results);
+    }
+}
+template void
+SearchOnGrowing<FloatVector>(const segcore::SegmentGrowingImpl& segment,
+                             const query::QueryInfo& info,
+                             const EmbeddedType<FloatVector>* query_data,
+                             int64_t num_queries,
+                             Timestamp timestamp,
+                             const faiss::BitsetView& bitset,
+                             QueryResult& results);
+template void
+SearchOnGrowing<BinaryVector>(const segcore::SegmentGrowingImpl& segment,
+                              const query::QueryInfo& info,
+                              const EmbeddedType<BinaryVector>* query_data,
+                              int64_t num_queries,
+                              Timestamp timestamp,
+                              const faiss::BitsetView& bitset,
+                              QueryResult& results);

 }  // namespace milvus::query

@@ -20,23 +20,13 @@ namespace milvus::query {
 using BitmapChunk = boost::dynamic_bitset<>;
 using BitmapSimple = std::deque<BitmapChunk>;

-// TODO: merge these two search into one
-// note: c++17 don't support optional ref
-Status
-FloatSearch(const segcore::SegmentGrowingImpl& segment,
-            const QueryInfo& info,
-            const float* query_data,
-            int64_t num_queries,
-            Timestamp timestamp,
-            const faiss::BitsetView& bitset,
-            QueryResult& results);
-
-Status
-BinarySearch(const segcore::SegmentGrowingImpl& segment,
-             const query::QueryInfo& info,
-             const uint8_t* query_data,
-             int64_t num_queries,
-             Timestamp timestamp,
-             const faiss::BitsetView& bitset,
-             QueryResult& results);
+template <typename VectorType>
+void
+SearchOnGrowing(const segcore::SegmentGrowingImpl& segment,
+                const query::QueryInfo& info,
+                const EmbeddedType<VectorType>* query_data,
+                int64_t num_queries,
+                Timestamp timestamp,
+                const faiss::BitsetView& bitset,
+                QueryResult& results);
 }  // namespace milvus::query
@@ -13,7 +13,7 @@

 #include "segcore/SealedIndexingRecord.h"
 #include "query/PlanNode.h"
-#include "query/Search.h"
+#include "query/SearchOnGrowing.h"

 namespace milvus::query {

@@ -33,7 +33,7 @@ class SubQueryResult {

     static constexpr bool
     is_descending(MetricType metric_type) {
-        // TODO
+        // TODO(dog): more types
         if (metric_type == MetricType::METRIC_INNER_PRODUCT) {
             return true;
         } else {
@@ -46,6 +46,11 @@ class ExecPlanNodeVisitor : public PlanNodeVisitor {
         return ret;
     }

+ private:
+    template <typename VectorType>
+    void
+    VectorVisitorImpl(VectorPlanNode& node);
+
  private:
     // std::optional<RetType> ret_;
     const segcore::SegmentGrowing& segment_;

@@ -16,7 +16,7 @@
 #include "query/generated/ExecPlanNodeVisitor.h"
 #include "segcore/SegmentGrowingImpl.h"
 #include "query/generated/ExecExprVisitor.h"
-#include "query/Search.h"
+#include "query/SearchOnGrowing.h"
 #include "query/SearchOnSealed.h"

 namespace milvus::query {

@@ -45,6 +45,11 @@ class ExecPlanNodeVisitor : PlanNodeVisitor {
         return ret;
     }

+ private:
+    template <typename VectorType>
+    void
+    VectorVisitorImpl(VectorPlanNode& node);
+
  private:
     // std::optional<RetType> ret_;
     const segcore::SegmentGrowing& segment_;

@@ -56,15 +61,16 @@ class ExecPlanNodeVisitor : PlanNodeVisitor {
 }  // namespace impl
 #endif

+template <typename VectorType>
 void
-ExecPlanNodeVisitor::visit(FloatVectorANNS& node) {
+ExecPlanNodeVisitor::VectorVisitorImpl(VectorPlanNode& node) {
     // TODO: optimize here, remove the dynamic cast
     assert(!ret_.has_value());
     auto segment = dynamic_cast<const segcore::SegmentGrowingImpl*>(&segment_);
     AssertInfo(segment, "support SegmentSmallIndex Only");
     RetType ret;
     auto& ph = placeholder_group_.at(0);
-    auto src_data = ph.get_blob<float>();
+    auto src_data = ph.get_blob<EmbeddedType<VectorType>>();
     auto num_queries = ph.num_of_queries_;

     aligned_vector<uint8_t> bitset_holder;

@@ -80,39 +86,20 @@ ExecPlanNodeVisitor::visit(FloatVectorANNS& node) {
         SearchOnSealed(segment->get_schema(), sealed_indexing, node.query_info_, src_data, num_queries, timestamp_,
                        view, ret);
     } else {
-        FloatSearch(*segment, node.query_info_, src_data, num_queries, timestamp_, view, ret);
+        SearchOnGrowing<VectorType>(*segment, node.query_info_, src_data, num_queries, timestamp_, view, ret);
     }

     ret_ = ret;
 }

+void
+ExecPlanNodeVisitor::visit(FloatVectorANNS& node) {
+    VectorVisitorImpl<FloatVector>(node);
+}
+
 void
 ExecPlanNodeVisitor::visit(BinaryVectorANNS& node) {
-    // TODO: optimize here, remove the dynamic cast
-    assert(!ret_.has_value());
-    auto segment = dynamic_cast<const segcore::SegmentGrowingImpl*>(&segment_);
-    AssertInfo(segment, "support SegmentSmallIndex Only");
-    RetType ret;
-    auto& ph = placeholder_group_.at(0);
-    auto src_data = ph.get_blob<uint8_t>();
-    auto num_queries = ph.num_of_queries_;
-
-    aligned_vector<uint8_t> bitset_holder;
-    BitsetView view;
-    if (node.predicate_.has_value()) {
-        ExecExprVisitor::RetType expr_ret = ExecExprVisitor(*segment).call_child(*node.predicate_.value());
-        bitset_holder = AssembleNegBitmap(expr_ret);
-        view = BitsetView(bitset_holder.data(), bitset_holder.size() * 8);
-    }
-
-    auto& sealed_indexing = segment->get_sealed_indexing_record();
-    if (sealed_indexing.is_ready(node.query_info_.field_offset_)) {
-        SearchOnSealed(segment->get_schema(), sealed_indexing, node.query_info_, src_data, num_queries, timestamp_,
-                       view, ret);
-    } else {
-        BinarySearch(*segment, node.query_info_, src_data, num_queries, timestamp_, view, ret);
-    }
-    ret_ = ret;
+    VectorVisitorImpl<BinaryVector>(node);
 }

 }  // namespace milvus::query
@@ -39,7 +39,6 @@ class ThreadSafeVector {
         if (size <= size_) {
             return;
         }
-        // TODO: use multithread to speedup
         std::lock_guard lck(mutex_);
         while (vec_.size() < size) {
             vec_.emplace_back(std::forward<Args...>(args...));

@@ -17,8 +17,6 @@
 namespace milvus::segcore {
 void
 VecIndexingEntry::BuildIndexRange(int64_t ack_beg, int64_t ack_end, const VectorBase* vec_base) {
-    // TODO
-
     assert(field_meta_.get_data_type() == DataType::VECTOR_FLOAT);
     auto dim = field_meta_.get_dim();

@@ -31,7 +29,6 @@ VecIndexingEntry::BuildIndexRange(int64_t ack_beg, int64_t ack_end, const Vector
     for (int chunk_id = ack_beg; chunk_id < ack_end; chunk_id++) {
         const auto& chunk = source->get_chunk(chunk_id);
         // build index for chunk
-        // TODO
         auto indexing = std::make_unique<knowhere::IVF>();
         auto dataset = knowhere::GenDataset(source->get_chunk_size(), dim, chunk.data());
         indexing->Train(dataset, conf);

@@ -47,6 +47,9 @@ class IndexingEntry {
         return chunk_size_;
     }

+    virtual knowhere::Index*
+    get_indexing(int64_t chunk_id) const = 0;
+
  protected:
     // additional info
     const FieldMeta& field_meta_;

@@ -62,7 +65,7 @@ class ScalarIndexingEntry : public IndexingEntry {

     // concurrent
     knowhere::scalar::StructuredIndex<T>*
-    get_indexing(int64_t chunk_id) const {
+    get_indexing(int64_t chunk_id) const override {
         Assert(!field_meta_.is_vector());
         return data_.at(chunk_id).get();
     }

@@ -80,7 +83,7 @@ class VecIndexingEntry : public IndexingEntry {

     // concurrent
     knowhere::VecIndex*
-    get_vec_indexing(int64_t chunk_id) const {
+    get_indexing(int64_t chunk_id) const override {
         Assert(field_meta_.is_vector());
         return data_.at(chunk_id).get();
     }
@@ -39,8 +39,6 @@ class SegmentGrowingImpl : public SegmentGrowing {
     int64_t
     PreInsert(int64_t size) override;

-    // TODO: originally, id should be put into data_chunk
-    // TODO: Is it ok to put them the other side?
     Status
     Insert(int64_t reserved_offset,
            int64_t size,

@@ -95,6 +93,22 @@ class SegmentGrowingImpl : public SegmentGrowing {
         return *schema_;
     }

+    // return count of index that has index, i.e., [0, num_chunk_index) have built index
+    int64_t
+    num_chunk_index_safe(FieldOffset field_offset) const final {
+        return indexing_record_.get_finished_ack();
+    }
+
+    const knowhere::Index*
+    chunk_index_impl(FieldOffset field_offset, int64_t chunk_id) const final {
+        return indexing_record_.get_entry(field_offset).get_indexing(chunk_id);
+    }
+
+    int64_t
+    chunk_size() const final {
+        return chunk_size_;
+    }
+
  public:
     ssize_t
     get_row_count() const override {

@@ -14,6 +14,8 @@
 #include "common/Schema.h"
 #include "query/Plan.h"
 #include "common/Span.h"
+#include "IndexingEntry.h"
+#include <knowhere/index/vector_index/VecIndex.h>

 namespace milvus::segcore {

@@ -52,10 +54,30 @@ class SegmentInternalInterface : public SegmentInterface {
         return static_cast<Span<T>>(chunk_data_impl(field_offset, chunk_id));
     }

+    virtual int64_t
+    num_chunk_index_safe(FieldOffset field_offset) const = 0;
+
+    template <typename T>
+    const knowhere::scalar::StructuredIndex<T>&
+    chunk_scalar_index(FieldOffset field_offset, int64_t chunk_id) const {
+        static_assert(IsScalar<T>);
+        using IndexType = knowhere::scalar::StructuredIndex<T>;
+        auto base_ptr = chunk_index_impl(field_offset, chunk_id);
+        auto ptr = dynamic_cast<const IndexType*>(base_ptr);
+        AssertInfo(ptr, "entry mismatch");
+        return *ptr;
+    }
+
+    virtual int64_t
+    chunk_size() const = 0;
+
  protected:
     // blob and row_count
     virtual SpanBase
     chunk_data_impl(FieldOffset field_offset, int64_t chunk_id) const = 0;
+
+    virtual const knowhere::Index*
+    chunk_index_impl(FieldOffset field_offset, int64_t chunk_id) const = 0;
 };

 }  // namespace milvus::segcore
@@ -19,7 +19,6 @@ NewCollection(const char* schema_proto_blob) {

     auto collection = std::make_unique<milvus::segcore::Collection>(proto);

-    // TODO: delete print
     std::cout << "create collection " << collection->get_collection_name() << std::endl;

     return (void*)collection.release();

@@ -29,8 +28,8 @@ void
 DeleteCollection(CCollection collection) {
     auto col = (milvus::segcore::Collection*)collection;

-    // TODO: delete print
     std::cout << "delete collection " << col->get_collection_name() << std::endl;

     delete col;
 }

@@ -27,7 +27,6 @@ NewSegment(CCollection collection, uint64_t segment_id) {

     auto segment = milvus::segcore::CreateGrowingSegment(col->get_schema());

-    // TODO: delete print
     std::cout << "create segment " << segment_id << std::endl;
     return (void*)segment.release();
 }

@@ -36,7 +35,6 @@ void
 DeleteSegment(CSegmentBase segment) {
     auto s = (milvus::segcore::SegmentGrowing*)segment;

-    // TODO: delete print
     std::cout << "delete segment " << std::endl;
     delete s;
 }

@@ -78,17 +76,12 @@ Insert(CSegmentBase c_segment,
         status.error_msg = strdup(e.what());
         return status;
     }

-    // TODO: delete print
-    // std::cout << "do segment insert, sizeof_per_row = " << sizeof_per_row << std::endl;
 }

 int64_t
 PreInsert(CSegmentBase c_segment, int64_t size) {
     auto segment = (milvus::segcore::SegmentGrowing*)c_segment;

-    // TODO: delete print
-    // std::cout << "PreInsert segment " << std::endl;
     return segment->PreInsert(size);
 }

@@ -116,8 +109,6 @@
 PreDelete(CSegmentBase c_segment, int64_t size) {
     auto segment = (milvus::segcore::SegmentGrowing*)c_segment;

-    // TODO: delete print
-    // std::cout << "PreDelete segment " << std::endl;
     return segment->PreDelete(size);
 }
@@ -8,17 +8,17 @@ import (

 	"github.com/zilliztech/milvus-distributed/internal/proto/commonpb"
 	"github.com/zilliztech/milvus-distributed/internal/proto/querypb"
-	querynodeimp "github.com/zilliztech/milvus-distributed/internal/querynode"
+	"github.com/zilliztech/milvus-distributed/internal/querynode"
 )

 type Server struct {
 	grpcServer *grpc.Server
-	node       querynodeimp.Node
+	node       querynode.Node
 }

 func NewServer(ctx context.Context, queryNodeID uint64) *Server {
 	return &Server{
-		node: querynodeimp.NewQueryNode(ctx, queryNodeID),
+		node: querynode.NewQueryNode(ctx, queryNodeID),
 	}
 }
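Note: the gRPC wrapper above now stores the node as querynode.Node. The sketch below shows the wrapping pattern in isolation with placeholder types; the real Node interface body and the gRPC registration are not part of this diff, so they are assumptions here.

package main

import (
	"context"
	"fmt"
)

// node stands in for the querynode.Node interface; its real method set is not shown in this diff.
type node interface{}

// newQueryNode stands in for querynode.NewQueryNode(ctx, queryNodeID).
func newQueryNode(ctx context.Context, queryNodeID uint64) node {
	return struct{}{}
}

// Server mirrors the wrapper in the hunk above: a gRPC front end owning a query node.
type Server struct {
	node node
}

func NewServer(ctx context.Context, queryNodeID uint64) *Server {
	return &Server{node: newQueryNode(ctx, queryNodeID)}
}

func main() {
	s := NewServer(context.Background(), 1)
	fmt.Printf("%T\n", s.node)
}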
@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"context"

@@ -20,6 +20,16 @@ func (node *QueryNode) AddQueryChannel(ctx context.Context, in *queryPb.AddQuery

 		return status, errors.New(errMsg)
 	default:
+		if node.searchService == nil || node.searchService.searchMsgStream == nil {
+			errMsg := "null search service or null search message stream"
+			status := &commonpb.Status{
+				ErrorCode: commonpb.ErrorCode_UNEXPECTED_ERROR,
+				Reason:    errMsg,
+			}
+
+			return status, errors.New(errMsg)
+		}
+
 		searchStream, ok := node.searchService.searchMsgStream.(*msgstream.PulsarMsgStream)
 		if !ok {
 			errMsg := "type assertion failed for search message stream"

@@ -71,6 +81,16 @@ func (node *QueryNode) RemoveQueryChannel(ctx context.Context, in *queryPb.Remov

 		return status, errors.New(errMsg)
 	default:
+		if node.searchService == nil || node.searchService.searchMsgStream == nil {
+			errMsg := "null search service or null search result message stream"
+			status := &commonpb.Status{
+				ErrorCode: commonpb.ErrorCode_UNEXPECTED_ERROR,
+				Reason:    errMsg,
+			}
+
+			return status, errors.New(errMsg)
+		}
+
 		searchStream, ok := node.searchService.searchMsgStream.(*msgstream.PulsarMsgStream)
 		if !ok {
 			errMsg := "type assertion failed for search message stream"

@@ -124,30 +144,38 @@ func (node *QueryNode) WatchDmChannels(ctx context.Context, in *queryPb.WatchDmC

 		return status, errors.New(errMsg)
 	default:
-		// TODO: add dmMsgStream reference to dataSyncService
-		//fgDMMsgStream, ok := node.dataSyncService.dmMsgStream.(*msgstream.PulsarMsgStream)
-		//if !ok {
-		//	errMsg := "type assertion failed for dm message stream"
-		//	status := &commonpb.Status{
-		//		ErrorCode: commonpb.ErrorCode_UNEXPECTED_ERROR,
-		//		Reason:    errMsg,
-		//	}
-		//
-		//	return status, errors.New(errMsg)
-		//}
-		//
-		//// add request channel
-		//pulsarBufSize := Params.SearchPulsarBufSize
-		//consumeChannels := in.ChannelIDs
-		//consumeSubName := Params.MsgChannelSubName
-		//unmarshalDispatcher := msgstream.NewUnmarshalDispatcher()
-		//fgDMMsgStream.CreatePulsarConsumers(consumeChannels, consumeSubName, unmarshalDispatcher, pulsarBufSize)
-		//
-		//status := &commonpb.Status{
-		//	ErrorCode: commonpb.ErrorCode_SUCCESS,
-		//}
-		//return status, nil
-		return nil, nil
+		if node.dataSyncService == nil || node.dataSyncService.dmStream == nil {
+			errMsg := "null data sync service or null data manipulation stream"
+			status := &commonpb.Status{
+				ErrorCode: commonpb.ErrorCode_UNEXPECTED_ERROR,
+				Reason:    errMsg,
+			}
+
+			return status, errors.New(errMsg)
+		}
+
+		fgDMMsgStream, ok := node.dataSyncService.dmStream.(*msgstream.PulsarMsgStream)
+		if !ok {
+			errMsg := "type assertion failed for dm message stream"
+			status := &commonpb.Status{
+				ErrorCode: commonpb.ErrorCode_UNEXPECTED_ERROR,
+				Reason:    errMsg,
+			}
+
+			return status, errors.New(errMsg)
+		}
+
+		// add request channel
+		pulsarBufSize := Params.SearchPulsarBufSize
+		consumeChannels := in.ChannelIDs
+		consumeSubName := Params.MsgChannelSubName
+		unmarshalDispatcher := msgstream.NewUnmarshalDispatcher()
+		fgDMMsgStream.CreatePulsarConsumers(consumeChannels, consumeSubName, unmarshalDispatcher, pulsarBufSize)
+
+		status := &commonpb.Status{
+			ErrorCode: commonpb.ErrorCode_SUCCESS,
+		}
+		return status, nil
 	}
 }
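Note: AddQueryChannel, RemoveQueryChannel and the newly implemented WatchDmChannels all follow the same defensive shape: verify that the owning service and its message stream exist, type-assert the stream to the concrete Pulsar implementation, then register the requested channels, returning a commonpb.Status plus a Go error on every failure path. A minimal Go sketch of that shape follows; the types are simplified stand-ins, not the real QueryNode structs.

package main

import (
	"errors"
	"fmt"
)

// Simplified stand-ins for the real types used in the handlers above.
type status struct {
	ErrorCode string
	Reason    string
}

type msgStream interface{ CreateConsumers(channels []string) }

type pulsarMsgStream struct{}

func (p *pulsarMsgStream) CreateConsumers(channels []string) {}

type dataSyncService struct{ dmStream msgStream }

type queryNode struct{ dataSyncService *dataSyncService }

// watchDmChannels mirrors the guard/assert/subscribe shape of WatchDmChannels above.
func (node *queryNode) watchDmChannels(channels []string) (*status, error) {
	if node.dataSyncService == nil || node.dataSyncService.dmStream == nil {
		errMsg := "null data sync service or null data manipulation stream"
		return &status{ErrorCode: "UNEXPECTED_ERROR", Reason: errMsg}, errors.New(errMsg)
	}
	stream, ok := node.dataSyncService.dmStream.(*pulsarMsgStream)
	if !ok {
		errMsg := "type assertion failed for dm message stream"
		return &status{ErrorCode: "UNEXPECTED_ERROR", Reason: errMsg}, errors.New(errMsg)
	}
	// Register the requested DM channels on the already-running stream.
	stream.CreateConsumers(channels)
	return &status{ErrorCode: "SUCCESS"}, nil
}

func main() {
	node := &queryNode{dataSyncService: &dataSyncService{dmStream: &pulsarMsgStream{}}}
	st, err := node.watchDmChannels([]string{"dm-channel-0"})
	fmt.Println(st.ErrorCode, err)
}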
@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 /*

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 /*

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"testing"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"testing"
@@ -1,9 +1,10 @@
-package querynodeimp
+package querynode

 import (
 	"context"
 	"log"

+	"github.com/zilliztech/milvus-distributed/internal/msgstream"
 	"github.com/zilliztech/milvus-distributed/internal/util/flowgraph"
 )

@@ -11,6 +12,9 @@ type dataSyncService struct {
 	ctx context.Context
 	fg  *flowgraph.TimeTickedFlowGraph

+	dmStream msgstream.MsgStream
+	ddStream msgstream.MsgStream
+
 	replica collectionReplica
 }

@@ -40,8 +44,8 @@ func (dsService *dataSyncService) initNodes() {

 	dsService.fg = flowgraph.NewTimeTickedFlowGraph(dsService.ctx)

-	var dmStreamNode node = newDmInputNode(dsService.ctx)
-	var ddStreamNode node = newDDInputNode(dsService.ctx)
+	var dmStreamNode node = dsService.newDmInputNode(dsService.ctx)
+	var ddStreamNode node = dsService.newDDInputNode(dsService.ctx)

 	var filterDmNode node = newFilteredDmNode()
 	var ddNode node = newDDNode(dsService.replica)
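Note: the new dmStream/ddStream fields are what let an RPC handler such as WatchDmChannels reach node.dataSyncService.dmStream and attach extra Pulsar consumers after the flow graph is already running. A small sketch of that idea with placeholder types follows; it is not the real msgstream API.

package main

import "fmt"

// Simplified stand-in for msgstream.MsgStream.
type msgStream interface {
	AddConsumers(channels []string)
}

type fakeStream struct {
	name     string
	channels []string
}

func (s *fakeStream) AddConsumers(channels []string) {
	s.channels = append(s.channels, channels...)
}

// dataSyncService keeps handles on the streams it feeds into the flow graph,
// mirroring the new dmStream/ddStream fields in the hunk above.
type dataSyncService struct {
	dmStream msgStream
	ddStream msgStream
}

func main() {
	ds := &dataSyncService{
		dmStream: &fakeStream{name: "dm"},
		ddStream: &fakeStream{name: "dd"},
	}
	// Because the service retains the stream, another component (for example a
	// WatchDmChannels handler) can later attach more channels to it.
	ds.dmStream.AddConsumers([]string{"dm-channel-0", "dm-channel-1"})
	fmt.Println(ds.dmStream.(*fakeStream).channels)
}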
@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"encoding/binary"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"log"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 type deleteNode struct {
 	baseNode

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"context"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"log"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"context"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 type key2SegNode struct {
 	baseNode

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"github.com/zilliztech/milvus-distributed/internal/msgstream"
@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"context"

@@ -7,7 +7,7 @@ import (
 	"github.com/zilliztech/milvus-distributed/internal/util/flowgraph"
 )

-func newDmInputNode(ctx context.Context) *flowgraph.InputNode {
+func (dsService *dataSyncService) newDmInputNode(ctx context.Context) *flowgraph.InputNode {
 	receiveBufSize := Params.InsertReceiveBufSize
 	pulsarBufSize := Params.InsertPulsarBufSize

@@ -22,6 +22,7 @@ func newDmInputNode(ctx context.Context) *flowgraph.InputNode {
 	insertStream.CreatePulsarConsumers(consumeChannels, consumeSubName, unmarshalDispatcher, pulsarBufSize)

 	var stream msgstream.MsgStream = insertStream
+	dsService.dmStream = stream

 	maxQueueLength := Params.FlowGraphMaxQueueLength
 	maxParallelism := Params.FlowGraphMaxParallelism

@@ -30,7 +31,7 @@ func newDmInputNode(ctx context.Context) *flowgraph.InputNode {
 	return node
 }

-func newDDInputNode(ctx context.Context) *flowgraph.InputNode {
+func (dsService *dataSyncService) newDDInputNode(ctx context.Context) *flowgraph.InputNode {
 	receiveBufSize := Params.DDReceiveBufSize
 	pulsarBufSize := Params.DDPulsarBufSize

@@ -45,6 +46,7 @@ func newDDInputNode(ctx context.Context) *flowgraph.InputNode {
 	ddStream.CreatePulsarConsumers(consumeChannels, consumeSubName, unmarshalDispatcher, pulsarBufSize)

 	var stream msgstream.MsgStream = ddStream
+	dsService.ddStream = stream

 	maxQueueLength := Params.FlowGraphMaxQueueLength
 	maxParallelism := Params.FlowGraphMaxParallelism
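Note: turning newDmInputNode and newDDInputNode into methods on dataSyncService is what enables the one-line additions dsService.dmStream = stream and dsService.ddStream = stream above: the constructor records the stream it builds on the service before wrapping it in a flow-graph input node. A minimal sketch of that constructor-as-method pattern, using placeholder types instead of the real msgstream/flowgraph ones, follows.

package main

import "fmt"

// Placeholder types standing in for msgstream.MsgStream and flowgraph.InputNode.
type stream struct{ name string }

type inputNode struct{ src *stream }

type dataSyncService struct {
	dmStream *stream
	ddStream *stream
}

// newDmInputNode builds the insert-channel stream, records it on the service,
// and wraps it in a flow-graph input node, mirroring the method above.
func (dsService *dataSyncService) newDmInputNode() *inputNode {
	s := &stream{name: "insert"}
	dsService.dmStream = s // keep a handle for later consumers (e.g. WatchDmChannels)
	return &inputNode{src: s}
}

func (dsService *dataSyncService) newDDInputNode() *inputNode {
	s := &stream{name: "data-definition"}
	dsService.ddStream = s
	return &inputNode{src: s}
}

func main() {
	ds := &dataSyncService{}
	dm := ds.newDmInputNode()
	dd := ds.newDDInputNode()
	fmt.Println(dm.src.name, dd.src.name, ds.dmStream != nil, ds.ddStream != nil)
}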
@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import "github.com/zilliztech/milvus-distributed/internal/util/flowgraph"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"log"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 /*

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 /*
 #cgo CFLAGS: -I${SRCDIR}/../core/output/include

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"testing"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"context"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"encoding/binary"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"context"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"math"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"log"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"fmt"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 /*

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"testing"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 /*
 #cgo CFLAGS: -I${SRCDIR}/../core/output/include

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"encoding/binary"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 /*

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"context"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 /*
 #cgo CFLAGS: -I${SRCDIR}/../core/output/include

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"encoding/binary"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import "C"
 import (

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"context"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 /*

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"encoding/binary"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"context"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"testing"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"sync"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import (
 	"testing"

@@ -1,4 +1,4 @@
-package querynodeimp
+package querynode

 import "github.com/zilliztech/milvus-distributed/internal/util/typeutil"