Mirror of https://gitee.com/milvus-io/milvus.git, synced 2026-01-04 01:42:15 +08:00

Commit 167ae9c22c (parent a5682c301f)
add new api for binary vector
Former-commit-id: e450d463c9bb6473f6da93602bdf306dd48c65f7
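The new calls carry each vector as a packed byte buffer (VecBinaryTensor.tensor, sized dim*sizeof(double) in the client code below) instead of a list of doubles. A rough client-side sketch of the call pattern, not part of the diff; MakeBinaryTensor is a hypothetical helper, while the field names tensor/uid and the add_binary_vector calls follow the generated types used in the changes below:

    #include <cstring>
    #include <string>
    #include <vector>

    // Pack a vector of doubles into the byte buffer of a VecBinaryTensor.
    VecBinaryTensor MakeBinaryTensor(const std::vector<double>& values, const std::string& uid) {
        VecBinaryTensor bin_tensor;
        bin_tensor.uid = uid;
        bin_tensor.tensor.resize(values.size() * sizeof(double));
        std::memcpy(const_cast<char*>(bin_tensor.tensor.data()), values.data(),
                    values.size() * sizeof(double));
        return bin_tensor;
    }

    // Expected usage against the new service methods:
    //   session.interface()->add_binary_vector(group.id, MakeBinaryTensor(values, "vec_0"));
    //   session.interface()->add_binary_vector_batch(group.id, bin_tensor_list);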
@@ -18,98 +18,104 @@ namespace zilliz {
namespace vecwise {
namespace server {

namespace {

class TimeRecordWrapper {
public:
    TimeRecordWrapper(const std::string& func_name)
        : recorder_(func_name), func_name_(func_name) {
        SERVER_LOG_TRACE << func_name << " called";
    }

    ~TimeRecordWrapper() {
        recorder_.Elapse("cost");
        SERVER_LOG_TRACE << func_name_ << " finished";
    }

private:
    TimeRecorder recorder_;
    std::string func_name_;
};

void TimeRecord(const std::string& func_name) {
}

}

void
VecServiceHandler::add_group(const VecGroup &group) {
    SERVER_LOG_INFO << "add_group() called";
    TimeRecordWrapper rc("add_group()");
    SERVER_LOG_TRACE << "group.id = " << group.id << ", group.dimension = " << group.dimension
                     << ", group.index_type = " << group.index_type;

    BaseTaskPtr task_ptr = AddGroupTask::Create(group.dimension, group.id);
    VecServiceScheduler& scheduler = VecServiceScheduler::GetInstance();
    scheduler.ExecuteTask(task_ptr);

    SERVER_LOG_INFO << "add_group() finished";
}

void
VecServiceHandler::get_group(VecGroup &_return, const std::string &group_id) {
    SERVER_LOG_INFO << "get_group() called";
    TimeRecordWrapper rc("get_group()");
    SERVER_LOG_TRACE << "group_id = " << group_id;

    _return.id = group_id;
    BaseTaskPtr task_ptr = GetGroupTask::Create(group_id, _return.dimension);
    VecServiceScheduler& scheduler = VecServiceScheduler::GetInstance();
    scheduler.ExecuteTask(task_ptr);

    SERVER_LOG_INFO << "get_group() finished";
}

void
VecServiceHandler::del_group(const std::string &group_id) {
    SERVER_LOG_INFO << "del_group() called";
    TimeRecordWrapper rc("del_group()");
    SERVER_LOG_TRACE << "group_id = " << group_id;

    BaseTaskPtr task_ptr = DeleteGroupTask::Create(group_id);
    VecServiceScheduler& scheduler = VecServiceScheduler::GetInstance();
    scheduler.ExecuteTask(task_ptr);

    SERVER_LOG_INFO << "del_group() finished";
}

void
VecServiceHandler::add_vector(const std::string &group_id, const VecTensor &tensor) {
    SERVER_LOG_INFO << "add_vector() called";
    TimeRecordWrapper rc("add_vector()");
    SERVER_LOG_TRACE << "group_id = " << group_id << ", vector size = " << tensor.tensor.size();

    BaseTaskPtr task_ptr = AddVectorTask::Create(group_id, &tensor);
    VecServiceScheduler& scheduler = VecServiceScheduler::GetInstance();
    scheduler.ExecuteTask(task_ptr);

    SERVER_LOG_INFO << "add_vector() finished";
}

void
VecServiceHandler::add_vector_batch(const std::string &group_id,
                                    const VecTensorList &tensor_list) {
    SERVER_LOG_INFO << "add_vector_batch() called";
    TimeRecordWrapper rc("add_vector_batch()");
    SERVER_LOG_TRACE << "group_id = " << group_id << ", vector list size = "
                     << tensor_list.tensor_list.size();

    BaseTaskPtr task_ptr = AddBatchVectorTask::Create(group_id, &tensor_list);
    VecServiceScheduler& scheduler = VecServiceScheduler::GetInstance();
    scheduler.ExecuteTask(task_ptr);

    SERVER_LOG_INFO << "add_vector_batch() finished";
}

void
VecServiceHandler::add_binary_vector(const std::string& group_id,
                                     const VecBinaryTensor& tensor) {
    SERVER_LOG_INFO << "add_binary_vector() called";
    TimeRecordWrapper rc("add_binary_vector()");
    SERVER_LOG_TRACE << "group_id = " << group_id
                     << ", vector size = " << tensor.tensor.size()/sizeof(double);

    BaseTaskPtr task_ptr = AddVectorTask::Create(group_id, &tensor);
    VecServiceScheduler& scheduler = VecServiceScheduler::GetInstance();
    scheduler.ExecuteTask(task_ptr);

    SERVER_LOG_INFO << "add_binary_vector() finished";
}

void
VecServiceHandler::add_binary_vector_batch(const std::string& group_id,
                                           const VecBinaryTensorList& tensor_list) {
    SERVER_LOG_INFO << "add_binary_vector_batch() called";
    TimeRecordWrapper rc("add_binary_vector_batch()");
    SERVER_LOG_TRACE << "group_id = " << group_id << ", vector list size = "
                     << tensor_list.tensor_list.size();

    BaseTaskPtr task_ptr = AddBatchVectorTask::Create(group_id, &tensor_list);
    VecServiceScheduler& scheduler = VecServiceScheduler::GetInstance();
    scheduler.ExecuteTask(task_ptr);

    SERVER_LOG_INFO << "add_binary_vector_batch() finished";
}

void
@@ -118,7 +124,7 @@ VecServiceHandler::search_vector(VecSearchResult &_return,
                                 const int64_t top_k,
                                 const VecTensor &tensor,
                                 const VecTimeRangeList &time_range_list) {
    SERVER_LOG_INFO << "search_vector() called";
    TimeRecordWrapper rc("search_vector()");
    SERVER_LOG_TRACE << "group_id = " << group_id << ", top_k = " << top_k
                     << ", vector size = " << tensor.tensor.size()
                     << ", time range list size = " << time_range_list.range_list.size();
@@ -135,8 +141,6 @@ VecServiceHandler::search_vector(VecSearchResult &_return,
    } else {
        SERVER_LOG_ERROR << "No search result returned";
    }

    SERVER_LOG_INFO << "search_vector() finished";
}

void
@@ -145,7 +149,7 @@ VecServiceHandler::search_vector_batch(VecSearchResultList &_return,
                                       const int64_t top_k,
                                       const VecTensorList &tensor_list,
                                       const VecTimeRangeList &time_range_list) {
    SERVER_LOG_INFO << "search_vector_batch() called";
    TimeRecordWrapper rc("search_vector_batch()");
    SERVER_LOG_TRACE << "group_id = " << group_id << ", top_k = " << top_k
                     << ", vector list size = " << tensor_list.tensor_list.size()
                     << ", time range list size = " << time_range_list.range_list.size();
@@ -153,8 +157,6 @@ VecServiceHandler::search_vector_batch(VecSearchResultList &_return,
    BaseTaskPtr task_ptr = SearchVectorTask::Create(group_id, top_k, tensor_list, time_range_list, _return);
    VecServiceScheduler& scheduler = VecServiceScheduler::GetInstance();
    scheduler.ExecuteTask(task_ptr);

    SERVER_LOG_INFO << "search_vector_batch() finished";
}

@@ -56,84 +56,64 @@ void ClientApp::Run(const std::string &config_file) {
    group.index_type = 0;
    session.interface()->add_group(group);

    //prepare data
    const int64_t count = 100000;
    VecTensorList tensor_list;
    VecBinaryTensorList bin_tensor_list;
    for (int64_t k = 0; k < count; k++) {
        VecTensor tensor;
        tensor.tensor.reserve(dim);
        VecBinaryTensor bin_tensor;
        bin_tensor.tensor.resize(dim*sizeof(double));
        double* d_p = (double*)(const_cast<char*>(bin_tensor.tensor.data()));
        for (int32_t i = 0; i < dim; i++) {
            double val = (double)(i + k);
            tensor.tensor.push_back(val);
            d_p[i] = val;
        }

        tensor.uid = "normal_vec_" + std::to_string(k);
        tensor_list.tensor_list.emplace_back(tensor);

        bin_tensor.uid = "binary_vec_" + std::to_string(k);
        bin_tensor_list.tensor_list.emplace_back(bin_tensor);
    }

//    //add vectors one by one
//    {
//        server::TimeRecorder rc("Add " + std::to_string(count) + " vectors one by one");
//        for (int64_t k = 0; k < count; k++) {
//            session.interface()->add_vector(group.id, tensor_list.tensor_list[k]);
//            if(k%1000 == 0) {
//                CLIENT_LOG_INFO << "add normal vector no." << k;
//            }
//        }
//        rc.Elapse("done!");
//    }
//
//    //add vectors in one batch
//    {
//        server::TimeRecorder rc("Add " + std::to_string(count) + " vectors in one batch");
//        session.interface()->add_vector_batch(group.id, tensor_list);
//        rc.Elapse("done!");
//    }
//
//    //add binary vectors one by one
//    {
//        server::TimeRecorder rc("Add " + std::to_string(count) + " binary vectors one by one");
//        for (int64_t k = 0; k < count; k++) {
//            session.interface()->add_binary_vector(group.id, bin_tensor_list.tensor_list[k]);
//            if(k%1000 == 0) {
//                CLIENT_LOG_INFO << "add binary vector no." << k;
//            }
//        }
//        rc.Elapse("done!");
//    }

    //add binary vectors in one batch
    {
        server::TimeRecorder rc("Add " + std::to_string(count) + " binary vectors in one batch");
        session.interface()->add_binary_vector_batch(group.id, bin_tensor_list);
        rc.Elapse("done!");
    }