diff --git a/cpp/src/server/VecServiceTask.cpp b/cpp/src/server/VecServiceTask.cpp
index 9c996cb3a1..c2435ff51b 100644
--- a/cpp/src/server/VecServiceTask.cpp
+++ b/cpp/src/server/VecServiceTask.cpp
@@ -488,10 +488,14 @@ ServerError AddBatchVectorTask::OnExecute() {
             std::list<std::future<void>> threads_list;
 
             uint64_t begin_index = 0, end_index = USE_MT;
-            while(end_index < vec_count) {
+            while(true) {
                 threads_list.push_back(
                         GetThreadPool().enqueue(&AddBatchVectorTask::ProcessIdMapping,
                                 this, vector_ids, begin_index, end_index, tensor_ids_));
+                if(end_index >= vec_count) {
+                    break;
+                }
+
                 begin_index = end_index;
                 end_index += USE_MT;
                 if(end_index > vec_count) {
diff --git a/cpp/test_client/src/ClientTest.cpp b/cpp/test_client/src/ClientTest.cpp
index 3a11e8bcc3..188f30a02e 100644
--- a/cpp/test_client/src/ClientTest.cpp
+++ b/cpp/test_client/src/ClientTest.cpp
@@ -211,7 +211,10 @@ TEST(AddVector, CLIENT_TEST) {
         server::TimeRecorder rc("Add " + std::to_string(count) + " binary vectors in one batch");
         std::vector<std::string> ids;
         session.interface()->add_binary_vector_batch(ids, group.id, bin_tensor_list_2);
-        ASSERT_TRUE(!ids.empty());
+        ASSERT_EQ(ids.size(), bin_tensor_list_2.tensor_list.size());
+        for(size_t i = 0; i < ids.size(); i++) {
+            ASSERT_TRUE(!ids[i].empty());
+        }
         rc.Elapse("done!");
     }
 #endif
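
Note on the VecServiceTask.cpp hunk: the old `while(end_index < vec_count)` loop enqueued a chunk only while end_index was strictly below vec_count, so the final chunk was never dispatched (and nothing was dispatched at all when vec_count <= USE_MT). The rewritten loop enqueues first and only then checks whether the end of the data has been reached. The sketch below is a standalone illustration of that enqueue-then-check pattern, not the Milvus source; USE_MT's value, the SplitIntoChunks() helper, and the initial clamp of end_index to vec_count are assumptions made for the example.

```cpp
// Minimal standalone sketch of the corrected chunking loop (not the Milvus source).
// The real task enqueues AddBatchVectorTask::ProcessIdMapping on a thread pool for
// each [begin, end) range; here we just collect the ranges so they can be printed.
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <utility>
#include <vector>

static constexpr uint64_t USE_MT = 1000;  // assumed chunk size, stand-in for the real constant

// Returns every [begin, end) range that would be enqueued for vec_count vectors.
static std::vector<std::pair<uint64_t, uint64_t>> SplitIntoChunks(uint64_t vec_count) {
    std::vector<std::pair<uint64_t, uint64_t>> chunks;
    uint64_t begin_index = 0;
    uint64_t end_index = std::min(USE_MT, vec_count);  // clamped here for the sketch
    while (true) {
        chunks.emplace_back(begin_index, end_index);   // "enqueue" the current chunk first
        if (end_index >= vec_count) {
            break;  // the final (possibly partial) chunk has already been emitted
        }
        begin_index = end_index;
        end_index += USE_MT;
        if (end_index > vec_count) {
            end_index = vec_count;
        }
    }
    return chunks;
}

int main() {
    // With the old `while(end_index < vec_count)` form, counts <= USE_MT produced
    // no chunks at all, and exact multiples of USE_MT lost their last chunk.
    for (uint64_t count : {500ull, 1000ull, 2500ull}) {
        std::cout << "count=" << count << ":";
        for (const auto& range : SplitIntoChunks(count)) {
            std::cout << " [" << range.first << "," << range.second << ")";
        }
        std::cout << "\n";
    }
    return 0;
}
```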