enhance: remove all unnecessary string formatting (#29323)

Done with two regular expressions:
- `PanicInfo\((.+),[. \n]+fmt::format\(([.\s\S]+?)\)\)`
- `AssertInfo\((.+),[. \n]+fmt::format\(([.\s\S]+?)\)\)`

related: #28811

---------

Signed-off-by: yah01 <yang.cen@zilliz.com>
This commit is contained in:
yah01 2023-12-20 10:04:43 +08:00 committed by GitHub
parent bcf8f27aa7
commit 8f89e9cf75
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
21 changed files with 256 additions and 272 deletions

View File

@ -48,9 +48,9 @@ template <typename ArrayType, arrow::Type::type ArrayDataType>
std::pair<const void*, int64_t>
GetDataInfoFromArray(const std::shared_ptr<arrow::Array> array) {
AssertInfo(array->type()->id() == ArrayDataType,
fmt::format("inconsistent data type, expected {}, actual {}",
ArrayDataType,
array->type()->id()));
"inconsistent data type, expected {}, actual {}",
ArrayDataType,
array->type()->id());
auto typed_array = std::dynamic_pointer_cast<ArrayType>(array);
auto element_count = array->length();

View File

@ -95,8 +95,7 @@ datatype_name(DataType data_type) {
return "vector_float16";
}
default: {
PanicInfo(DataTypeInvalid,
fmt::format("Unsupported DataType({})", data_type));
PanicInfo(DataTypeInvalid, "Unsupported DataType({})", data_type);
}
}
}

View File

@ -96,8 +96,8 @@ PhyBinaryArithOpEvalRangeExpr::Eval(EvalCtx& context, VectorPtr& result) {
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type: {}",
expr_->column_.data_type_));
"unsupported data type: {}",
expr_->column_.data_type_);
}
}
@ -262,9 +262,9 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForJson() {
}
default:
PanicInfo(OpTypeInvalid,
fmt::format("unsupported operator type for binary "
"arithmetic eval expr: {}",
op_type));
"unsupported operator type for binary "
"arithmetic eval expr: {}",
op_type);
}
};
int64_t processed_size = ProcessDataChunks<milvus::Json>(execute_sub_batch,
@ -274,10 +274,10 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForJson() {
right_operand,
pointer);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}
@ -415,19 +415,19 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForArray() {
}
default:
PanicInfo(OpTypeInvalid,
fmt::format("unsupported operator type for binary "
"arithmetic eval expr: {}",
op_type));
"unsupported operator type for binary "
"arithmetic eval expr: {}",
op_type);
}
};
int64_t processed_size = ProcessDataChunks<milvus::ArrayView>(
execute_sub_batch, std::nullptr_t{}, res, value, right_operand, index);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}
@ -581,18 +581,18 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForIndex() {
}
default:
PanicInfo(OpTypeInvalid,
fmt::format("unsupported operator type for binary "
"arithmetic eval expr: {}",
op_type));
"unsupported operator type for binary "
"arithmetic eval expr: {}",
op_type);
}
return res;
};
auto res = ProcessIndexChunks<T>(execute_sub_batch, value, right_operand);
AssertInfo(res.size() == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
res.size(),
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
res.size(),
real_batch_size);
return std::make_shared<ColumnVector>(std::move(res));
}
@ -729,18 +729,18 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForData() {
}
default:
PanicInfo(OpTypeInvalid,
fmt::format("unsupported operator type for binary "
"arithmetic eval expr: {}",
op_type));
"unsupported operator type for binary "
"arithmetic eval expr: {}",
op_type);
}
};
int64_t processed_size = ProcessDataChunks<T>(
execute_sub_batch, std::nullptr_t{}, res, value, right_operand);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}

View File

@ -110,8 +110,8 @@ PhyBinaryRangeFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type: {}",
expr_->column_.data_type_));
"unsupported data type: {}",
expr_->column_.data_type_);
}
}
@ -202,10 +202,10 @@ PhyBinaryRangeFilterExpr::ExecRangeVisitorImplForIndex() {
};
auto res = ProcessIndexChunks<T>(execute_sub_batch, val1, val2);
AssertInfo(res.size() == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
res.size(),
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
res.size(),
real_batch_size);
return std::make_shared<ColumnVector>(std::move(res));
}
@ -277,10 +277,10 @@ PhyBinaryRangeFilterExpr::ExecRangeVisitorImplForData() {
int64_t processed_size = ProcessDataChunks<T>(
execute_sub_batch, skip_index_func, res, val1, val2);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}
@ -327,10 +327,10 @@ PhyBinaryRangeFilterExpr::ExecRangeVisitorImplForJson() {
int64_t processed_size = ProcessDataChunks<milvus::Json>(
execute_sub_batch, std::nullptr_t{}, res, val1, val2);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}
@ -381,10 +381,10 @@ PhyBinaryRangeFilterExpr::ExecRangeVisitorImplForArray() {
int64_t processed_size = ProcessDataChunks<milvus::ArrayView>(
execute_sub_batch, std::nullptr_t{}, res, val1, val2, index);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}

View File

@ -104,8 +104,7 @@ PhyCompareFilterExpr::GetChunkData(DataType data_type,
return GetChunkData<std::string>(field_id, chunk_id, data_barrier);
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type: {}", data_type));
PanicInfo(DataTypeInvalid, "unsupported data type: {}", data_type);
}
}
@ -194,8 +193,7 @@ PhyCompareFilterExpr::ExecCompareExprDispatcherForHybridSegment() {
// case OpType::PostfixMatch: {
// }
default: {
PanicInfo(OpTypeInvalid,
fmt::format("unsupported optype: {}", expr_->op_type_));
PanicInfo(OpTypeInvalid, "unsupported optype: {}", expr_->op_type_);
}
}
}
@ -308,10 +306,10 @@ PhyCompareFilterExpr::ExecCompareRightType() {
int64_t processed_size =
ProcessBothDataChunks<T, U>(execute_sub_batch, res);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
};

View File

@ -33,8 +33,8 @@ PhyExistsFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type: {}",
expr_->column_.data_type_));
"unsupported data type: {}",
expr_->column_.data_type_);
}
}
@ -61,10 +61,10 @@ PhyExistsFilterExpr::EvalJsonExistsForDataSegment() {
int64_t processed_size = ProcessDataChunks<Json>(
execute_sub_batch, std::nullptr_t{}, res, pointer);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}

View File

@ -34,8 +34,8 @@ PhyJsonContainsFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type: {}",
expr_->column_.data_type_));
"unsupported data type: {}",
expr_->column_.data_type_);
}
}
@ -86,8 +86,8 @@ PhyJsonContainsFilterExpr::EvalJsonContainsForDataSegment() {
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type:{}",
val_type));
"unsupported data type:{}",
val_type);
}
} else {
return ExecJsonContainsWithDiffType();
@ -137,8 +137,8 @@ PhyJsonContainsFilterExpr::EvalJsonContainsForDataSegment() {
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type:{}",
val_type));
"unsupported data type:{}",
val_type);
}
} else {
return ExecJsonContainsAllWithDiffType();
@ -148,9 +148,8 @@ PhyJsonContainsFilterExpr::EvalJsonContainsForDataSegment() {
}
default:
PanicInfo(ExprInvalid,
fmt::format("unsupported json contains type {}",
proto::plan::JSONContainsExpr_JSONOp_Name(
expr_->op_)));
"unsupported json contains type {}",
proto::plan::JSONContainsExpr_JSONOp_Name(expr_->op_));
}
}
@ -196,10 +195,10 @@ PhyJsonContainsFilterExpr::ExecArrayContains() {
int64_t processed_size = ProcessDataChunks<milvus::ArrayView>(
execute_sub_batch, std::nullptr_t{}, res, elements);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}
@ -253,10 +252,10 @@ PhyJsonContainsFilterExpr::ExecJsonContains() {
int64_t processed_size = ProcessDataChunks<Json>(
execute_sub_batch, std::nullptr_t{}, res, pointer, elements);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}
@ -314,10 +313,10 @@ PhyJsonContainsFilterExpr::ExecJsonContainsArray() {
int64_t processed_size = ProcessDataChunks<milvus::Json>(
execute_sub_batch, std::nullptr_t{}, res, pointer, elements);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}
@ -366,10 +365,10 @@ PhyJsonContainsFilterExpr::ExecArrayContainsAll() {
int64_t processed_size = ProcessDataChunks<milvus::ArrayView>(
execute_sub_batch, std::nullptr_t{}, res, elements);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}
@ -426,10 +425,10 @@ PhyJsonContainsFilterExpr::ExecJsonContainsAll() {
int64_t processed_size = ProcessDataChunks<Json>(
execute_sub_batch, std::nullptr_t{}, res, pointer, elements);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}
@ -551,10 +550,10 @@ PhyJsonContainsFilterExpr::ExecJsonContainsAllWithDiffType() {
elements,
elements_index);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}
@ -617,10 +616,10 @@ PhyJsonContainsFilterExpr::ExecJsonContainsAllArray() {
int64_t processed_size = ProcessDataChunks<Json>(
execute_sub_batch, std::nullptr_t{}, res, pointer, elements);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}
@ -729,10 +728,10 @@ PhyJsonContainsFilterExpr::ExecJsonContainsWithDiffType() {
int64_t processed_size = ProcessDataChunks<Json>(
execute_sub_batch, std::nullptr_t{}, res, pointer, elements);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}

View File

@ -22,8 +22,8 @@ namespace exec {
void
PhyLogicalBinaryExpr::Eval(EvalCtx& context, VectorPtr& result) {
AssertInfo(inputs_.size() == 2,
fmt::format("logical binary expr must has two input, but now {}",
inputs_.size()));
"logical binary expr must has two input, but now {}",
inputs_.size());
VectorPtr left;
inputs_[0]->Eval(context, left);
VectorPtr right;
@ -41,8 +41,8 @@ PhyLogicalBinaryExpr::Eval(EvalCtx& context, VectorPtr& result) {
func(ldata, rdata, size);
} else {
PanicInfo(OpTypeInvalid,
fmt::format("unsupported logical operator: {}",
expr_->GetOpTypeString()));
"unsupported logical operator: {}",
expr_->GetOpTypeString());
}
result = std::move(left);
}

View File

@ -40,8 +40,7 @@ struct LogicalElementFunc {
} else if constexpr (op == LogicalOpType::Or) {
milvus::simd::or_bool(left, right, n);
} else {
PanicInfo(OpTypeInvalid,
fmt::format("unsupported logical operator: {}", op));
PanicInfo(OpTypeInvalid, "unsupported logical operator: {}", op);
}
#else
for (size_t i = 0; i < n; ++i) {
@ -50,8 +49,8 @@ struct LogicalElementFunc {
} else if constexpr (op == LogicalOpType::Or) {
left[i] |= right[i];
} else {
PanicInfo(OpTypeInvalid,
fmt::format("unsupported logical operator: {}", op));
PanicInfo(
OpTypeInvalid, "unsupported logical operator: {}", op);
}
}
#endif

View File

@ -23,8 +23,8 @@ namespace exec {
void
PhyLogicalUnaryExpr::Eval(EvalCtx& context, VectorPtr& result) {
AssertInfo(inputs_.size() == 1,
fmt::format("logical unary expr must has one input, but now {}",
inputs_.size()));
"logical unary expr must has one input, but now {}",
inputs_.size());
inputs_[0]->Eval(context, result);
if (expr_->op_type_ == milvus::expr::LogicalUnaryExpr::OpType::LogicalNot) {

View File

@ -82,8 +82,7 @@ PhyTermFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
result = ExecVisitorImplTemplateJson<std::string>();
break;
default:
PanicInfo(DataTypeInvalid,
fmt::format("unknown data type: {}", type));
PanicInfo(DataTypeInvalid, "unknown data type: {}", type);
}
break;
}
@ -107,15 +106,14 @@ PhyTermFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
result = ExecVisitorImplTemplateArray<std::string>();
break;
default:
PanicInfo(DataTypeInvalid,
fmt::format("unknown data type: {}", type));
PanicInfo(DataTypeInvalid, "unknown data type: {}", type);
}
break;
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type: {}",
expr_->column_.data_type_));
"unsupported data type: {}",
expr_->column_.data_type_);
}
}
@ -138,8 +136,7 @@ PhyTermFilterExpr::InitPkCacheOffset() {
break;
}
default: {
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type {}", pk_type_));
PanicInfo(DataTypeInvalid, "unsupported data type {}", pk_type_);
}
}
@ -244,10 +241,10 @@ PhyTermFilterExpr::ExecTermArrayVariableInField() {
int64_t processed_size = ProcessDataChunks<milvus::ArrayView>(
execute_sub_batch, std::nullptr_t{}, res, target_val);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}
@ -305,10 +302,10 @@ PhyTermFilterExpr::ExecTermArrayFieldInVariable() {
int64_t processed_size = ProcessDataChunks<milvus::ArrayView>(
execute_sub_batch, std::nullptr_t{}, res, index, term_set);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}
@ -358,10 +355,10 @@ PhyTermFilterExpr::ExecTermJsonVariableInField() {
int64_t processed_size = ProcessDataChunks<milvus::Json>(
execute_sub_batch, std::nullptr_t{}, res, pointer, val);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}
@ -421,10 +418,10 @@ PhyTermFilterExpr::ExecTermJsonFieldInVariable() {
int64_t processed_size = ProcessDataChunks<milvus::Json>(
execute_sub_batch, std::nullptr_t{}, res, pointer, term_set);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}
@ -468,10 +465,10 @@ PhyTermFilterExpr::ExecVisitorImplForIndex() {
};
auto res = ProcessIndexChunks<T>(execute_sub_batch, vals);
AssertInfo(res.size() == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
res.size(),
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
res.size(),
real_batch_size);
return std::make_shared<ColumnVector>(std::move(res));
}
@ -529,10 +526,10 @@ PhyTermFilterExpr::ExecVisitorImplForData() {
int64_t processed_size = ProcessDataChunks<T>(
execute_sub_batch, std::nullptr_t{}, res, vals_set);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}

View File

@ -78,8 +78,8 @@ PhyUnaryRangeFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
result = ExecRangeVisitorImplJson<proto::plan::Array>();
break;
default:
PanicInfo(DataTypeInvalid,
fmt::format("unknown data type: {}", val_type));
PanicInfo(
DataTypeInvalid, "unknown data type: {}", val_type);
}
break;
}
@ -102,15 +102,15 @@ PhyUnaryRangeFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
result = ExecRangeVisitorImplArray<proto::plan::Array>();
break;
default:
PanicInfo(DataTypeInvalid,
fmt::format("unknown data type: {}", val_type));
PanicInfo(
DataTypeInvalid, "unknown data type: {}", val_type);
}
break;
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type: {}",
expr_->column_.data_type_));
"unsupported data type: {}",
expr_->column_.data_type_);
}
}
@ -189,10 +189,10 @@ PhyUnaryRangeFilterExpr::ExecRangeVisitorImplArray() {
int64_t processed_size = ProcessDataChunks<milvus::ArrayView>(
execute_sub_batch, std::nullptr_t{}, res, val, index);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}
@ -343,10 +343,10 @@ PhyUnaryRangeFilterExpr::ExecRangeVisitorImplJson() {
int64_t processed_size = ProcessDataChunks<milvus::Json>(
execute_sub_batch, std::nullptr_t{}, res, val);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}
@ -425,10 +425,10 @@ PhyUnaryRangeFilterExpr::ExecRangeVisitorImplForIndex() {
auto val = GetValueFromProto<IndexInnerType>(expr_->val_);
auto res = ProcessIndexChunks<T>(execute_sub_batch, val);
AssertInfo(res.size() == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
res.size(),
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
res.size(),
real_batch_size);
return std::make_shared<ColumnVector>(std::move(res));
}
@ -498,8 +498,8 @@ PhyUnaryRangeFilterExpr::PreCheckOverflow() {
}
default: {
PanicInfo(OpTypeInvalid,
fmt::format("unsupported range node {}",
expr_->op_type_));
"unsupported range node {}",
expr_->op_type_);
}
}
}
@ -582,10 +582,10 @@ PhyUnaryRangeFilterExpr::ExecRangeVisitorImplForData() {
int64_t processed_size =
ProcessDataChunks<T>(execute_sub_batch, skip_index_func, res, val);
AssertInfo(processed_size == real_batch_size,
fmt::format("internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size));
"internal error: expr processed rows {} not equal "
"expect batch size {}",
processed_size,
real_batch_size);
return res_vec;
}

View File

@ -122,9 +122,9 @@ struct UnaryElementFuncForArray {
UnaryArrayCompare(milvus::query::Match(array_data, val, op));
} else {
PanicInfo(OpTypeInvalid,
fmt::format("unsupported op_type:{} for "
"UnaryElementFuncForArray",
op));
"unsupported op_type:{} for "
"UnaryElementFuncForArray",
op);
}
}
}

View File

@ -100,8 +100,8 @@ CompareTwoJsonArray(T arr1, const proto::plan::Array& arr2) {
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type {}",
arr2.array(i).val_case()));
"unsupported data type {}",
arr2.array(i).val_case());
}
i++;
}
@ -143,8 +143,8 @@ GetValueFromProtoInternal(const milvus::proto::plan::GenericValue& value_proto,
return static_cast<T>(value_proto);
} else {
PanicInfo(Unsupported,
fmt::format("unsupported generic value {}",
value_proto.DebugString()));
"unsupported generic value {}",
value_proto.DebugString());
}
}

View File

@ -147,7 +147,8 @@ VectorMemIndex<T>::Serialize(const Config& config) {
auto stat = index_.Serialize(ret);
if (stat != knowhere::Status::success)
PanicInfo(ErrorCode::UnexpectedError,
"failed to serialize index, " + KnowhereStatusString(stat));
"failed to serialize index: {}",
KnowhereStatusString(stat));
Disassemble(ret);
return ret;
@ -160,7 +161,8 @@ VectorMemIndex<T>::LoadWithoutAssemble(const BinarySet& binary_set,
auto stat = index_.Deserialize(binary_set, config);
if (stat != knowhere::Status::success)
PanicInfo(ErrorCode::UnexpectedError,
"failed to Deserialize index, " + KnowhereStatusString(stat));
"failed to Deserialize index: {}",
KnowhereStatusString(stat));
SetDim(index_.Dim());
}
@ -409,8 +411,8 @@ VectorMemIndex<T>::BuildV2(const Config& config) {
auto res = space_->ScanData();
if (!res.ok()) {
PanicInfo(IndexBuildError,
fmt::format("failed to create scan iterator: {}",
res.status().ToString()));
"failed to create scan iterator: {}",
res.status().ToString());
}
auto reader = res.value();
@ -418,8 +420,8 @@ VectorMemIndex<T>::BuildV2(const Config& config) {
for (auto rec : *reader) {
if (!rec.ok()) {
PanicInfo(IndexBuildError,
fmt::format("failed to read data: {}",
rec.status().ToString()));
"failed to read data: {}",
rec.status().ToString());
}
auto data = rec.ValueUnsafe();
if (data == nullptr) {
@ -538,9 +540,9 @@ VectorMemIndex<T>::Query(const DatasetPtr dataset,
milvus::tracer::AddEvent("finish_knowhere_index_range_search");
if (!res.has_value()) {
PanicInfo(ErrorCode::UnexpectedError,
fmt::format("failed to range search: {}: {}",
KnowhereStatusString(res.error()),
res.what()));
"failed to range search: {}: {}",
KnowhereStatusString(res.error()),
res.what());
}
auto result = ReGenRangeSearchResult(
res.value(), topk, num_queries, GetMetricType());
@ -552,9 +554,9 @@ VectorMemIndex<T>::Query(const DatasetPtr dataset,
milvus::tracer::AddEvent("finish_knowhere_index_search");
if (!res.has_value()) {
PanicInfo(ErrorCode::UnexpectedError,
fmt::format("failed to search: {}: {}",
KnowhereStatusString(res.error()),
res.what()));
"failed to search: {}: {}",
KnowhereStatusString(res.error()),
res.what());
}
return res.value();
}
@ -717,8 +719,8 @@ VectorMemIndex<T>::LoadFromFile(const Config& config) {
auto stat = index_.DeserializeFromFile(filepath.value(), conf);
if (stat != knowhere::Status::success) {
PanicInfo(ErrorCode::UnexpectedError,
fmt::format("failed to Deserialize index: {}",
KnowhereStatusString(stat)));
"failed to Deserialize index: {}",
KnowhereStatusString(stat));
}
auto dim = index_.Dim();
@ -726,9 +728,9 @@ VectorMemIndex<T>::LoadFromFile(const Config& config) {
auto ok = unlink(filepath->data());
AssertInfo(ok == 0,
fmt::format("failed to unlink mmap index file {}: {}",
filepath.value(),
strerror(errno)));
"failed to unlink mmap index file {}: {}",
filepath.value(),
strerror(errno));
LOG_SEGCORE_INFO_ << "load vector index done";
}
@ -821,8 +823,8 @@ VectorMemIndex<T>::LoadFromFileV2(const Config& config) {
auto stat = index_.DeserializeFromFile(filepath.value(), conf);
if (stat != knowhere::Status::success) {
PanicInfo(DataFormatBroken,
fmt::format("failed to Deserialize index: {}",
KnowhereStatusString(stat)));
"failed to Deserialize index: {}",
KnowhereStatusString(stat));
}
auto dim = index_.Dim();
@ -830,9 +832,9 @@ VectorMemIndex<T>::LoadFromFileV2(const Config& config) {
auto ok = unlink(filepath->data());
AssertInfo(ok == 0,
fmt::format("failed to unlink mmap index file {}: {}",
filepath.value(),
strerror(errno)));
"failed to unlink mmap index file {}: {}",
filepath.value(),
strerror(errno));
LOG_SEGCORE_INFO_ << "load vector index done";
}
template class VectorMemIndex<float>;

View File

@ -194,15 +194,15 @@ CreateIndexV2(CIndex* res_index, CBuildIndexInfo c_build_index_info) {
milvus_storage::Options{nullptr,
build_index_info->data_store_version});
AssertInfo(store_space.ok() && store_space.has_value(),
fmt::format("create space failed: {}",
store_space.status().ToString()));
"create space failed: {}",
store_space.status().ToString());
auto index_space = milvus_storage::Space::Open(
build_index_info->index_store_path,
milvus_storage::Options{.schema = store_space.value()->schema()});
AssertInfo(index_space.ok() && index_space.has_value(),
fmt::format("create space failed: {}",
index_space.status().ToString()));
"create space failed: {}",
index_space.status().ToString());
LOG_SEGCORE_INFO_ << "init space success";
auto chunk_manager = milvus::storage::CreateChunkManager(

View File

@ -66,8 +66,8 @@ FillField(DataType data_type, const FieldDataPtr data, void* dst) {
break;
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("not supported data type {}", data_type));
PanicInfo(
DataTypeInvalid, "not supported data type {}", data_type);
}
} else {
memcpy(dst, data->Data(), data->Size());
@ -126,8 +126,8 @@ WriteFieldData(File& file,
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("not supported data type {}",
datatype_name(data_type)));
"not supported data type {}",
datatype_name(data_type));
}
} else {
total_written += file.Write(data->Data(), data->Size());

View File

@ -474,8 +474,8 @@ ProtoParser::ParseBinaryRangeExpr(const proto::plan::BinaryRangeExpr& expr_pb) {
}
default: {
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type {}", data_type));
PanicInfo(
DataTypeInvalid, "unsupported data type {}", data_type);
}
}
}();
@ -633,8 +633,8 @@ ProtoParser::ParseTermExpr(const proto::plan::TermExpr& expr_pb) {
}
}
default: {
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type {}", data_type));
PanicInfo(
DataTypeInvalid, "unsupported data type {}", data_type);
}
}
}();
@ -710,9 +710,8 @@ ProtoParser::ParseBinaryArithOpEvalRangeExpr(
field_id, data_type, expr_pb);
default:
PanicInfo(DataTypeInvalid,
fmt::format(
"unsupported data type {} in expression",
expr_pb.value().val_case()));
"unsupported data type {} in expression",
expr_pb.value().val_case());
}
}
case DataType::ARRAY: {
@ -725,14 +724,13 @@ ProtoParser::ParseBinaryArithOpEvalRangeExpr(
field_id, data_type, expr_pb);
default:
PanicInfo(DataTypeInvalid,
fmt::format(
"unsupported data type {} in expression",
expr_pb.value().val_case()));
"unsupported data type {} in expression",
expr_pb.value().val_case());
}
}
default: {
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type {}", data_type));
PanicInfo(
DataTypeInvalid, "unsupported data type {}", data_type);
}
}
}();
@ -781,8 +779,8 @@ ProtoParser::ParseExistExpr(const proto::plan::ExistsExpr& expr_pb) {
return ExtractExistsExprImpl(expr_pb);
}
default: {
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type {}", data_type));
PanicInfo(
DataTypeInvalid, "unsupported data type {}", data_type);
}
}
}();
@ -981,8 +979,7 @@ ProtoParser::ParseExpr(const proto::plan::Expr& expr_pb) {
default: {
std::string s;
google::protobuf::TextFormat::PrintToString(expr_pb, &s);
PanicInfo(ExprInvalid,
fmt::format("unsupported expr proto node: {}", s));
PanicInfo(ExprInvalid, "unsupported expr proto node: {}", s);
}
}
}

View File

@ -58,8 +58,7 @@ generate_scalar_index(SpanBase data, DataType data_type) {
case DataType::VARCHAR:
return generate_scalar_index(Span<std::string>(data));
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported type {}", data_type));
PanicInfo(DataTypeInvalid, "unsupported type {}", data_type);
}
}

View File

@ -106,9 +106,9 @@ BruteForceSearch(const dataset::SearchDataset& dataset,
milvus::tracer::AddEvent("knowhere_finish_BruteForce_RangeSearch");
if (!res.has_value()) {
PanicInfo(KnowhereError,
fmt::format("failed to range search: {}: {}",
KnowhereStatusString(res.error()),
res.what()));
"failed to range search: {}: {}",
KnowhereStatusString(res.error()),
res.what());
}
auto result =
ReGenRangeSearchResult(res.value(), topk, nq, dataset.metric_type);

View File

@ -124,8 +124,7 @@ ExecExprVisitor::visit(LogicalUnaryExpr& expr) {
break;
}
default: {
PanicInfo(OpTypeInvalid,
fmt::format("Invalid Unary Op {}", expr.op_type_));
PanicInfo(OpTypeInvalid, "Invalid Unary Op {}", expr.op_type_);
}
}
AssertInfo(res.size() == row_count_,
@ -172,8 +171,7 @@ ExecExprVisitor::visit(LogicalBinaryExpr& expr) {
break;
}
default: {
PanicInfo(OpTypeInvalid,
fmt::format("Invalid Binary Op {}", expr.op_type_));
PanicInfo(OpTypeInvalid, "Invalid Binary Op {}", expr.op_type_);
}
}
AssertInfo(res.size() == row_count_,
@ -503,8 +501,7 @@ ExecExprVisitor::ExecUnaryRangeVisitorDispatcherImpl(UnaryRangeExpr& expr_raw)
}
// TODO: PostfixMatch
default: {
PanicInfo(OpTypeInvalid,
fmt::format("unsupported range node {}", op));
PanicInfo(OpTypeInvalid, "unsupported range node {}", op);
}
}
}
@ -617,8 +614,8 @@ CompareTwoJsonArray(T arr1, const proto::plan::Array& arr2) {
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type {}",
arr2.array(i).val_case()));
"unsupported data type {}",
arr2.array(i).val_case());
}
i++;
}
@ -761,8 +758,7 @@ ExecExprVisitor::ExecUnaryRangeVisitorDispatcherJson(UnaryRangeExpr& expr_raw)
}
// TODO: PostfixMatch
default: {
PanicInfo(OpTypeInvalid,
fmt::format("unsupported range node {}", op));
PanicInfo(OpTypeInvalid, "unsupported range node {}", op);
}
}
}
@@ -897,8 +893,7 @@ ExecExprVisitor::ExecUnaryRangeVisitorDispatcherArray(UnaryRangeExpr& expr_raw)
}
// TODO: PostfixMatch
default: {
PanicInfo(OpTypeInvalid,
fmt::format("unsupported range node {}", op));
PanicInfo(OpTypeInvalid, "unsupported range node {}", op);
}
}
}
@@ -1874,8 +1869,8 @@ ExecExprVisitor::visit(UnaryRangeExpr& expr) {
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type: {}",
expr.column_.data_type));
"unsupported data type: {}",
expr.column_.data_type);
}
AssertInfo(res.size() == row_count_,
"[ExecExprVisitor]Size of results not equal row count");
@@ -1963,8 +1958,8 @@ ExecExprVisitor::visit(BinaryArithOpEvalRangeExpr& expr) {
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type: {}",
expr.column_.data_type));
"unsupported data type: {}",
expr.column_.data_type);
}
AssertInfo(res.size() == row_count_,
"[ExecExprVisitor]Size of results not equal row count");
@@ -2064,8 +2059,8 @@ ExecExprVisitor::visit(BinaryRangeExpr& expr) {
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type: {}",
expr.column_.data_type));
"unsupported data type: {}",
expr.column_.data_type);
}
AssertInfo(res.size() == row_count_,
"[ExecExprVisitor]Size of results not equal row count");
@@ -2481,8 +2476,8 @@ ExecExprVisitor::ExecCompareExprDispatcher(CompareExpr& expr, Op op)
}
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type {}", type));
PanicInfo(
DataTypeInvalid, "unsupported data type {}", type);
}
};
auto left = getChunkData(
@@ -2550,8 +2545,7 @@ ExecExprVisitor::visit(CompareExpr& expr) {
// case OpType::PostfixMatch: {
// }
default: {
PanicInfo(OpTypeInvalid,
fmt::format("unsupported optype {}", expr.op_type_));
PanicInfo(OpTypeInvalid, "unsupported optype {}", expr.op_type_);
}
}
AssertInfo(res.size() == row_count_,
@@ -2958,8 +2952,8 @@ ExecExprVisitor::visit(TermExpr& expr) {
break;
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type {}",
expr.val_case_));
"unsupported data type {}",
expr.val_case_);
}
break;
}
@@ -2989,8 +2983,8 @@ ExecExprVisitor::visit(TermExpr& expr) {
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type {}",
expr.column_.data_type));
"unsupported data type {}",
expr.column_.data_type);
}
AssertInfo(res.size() == row_count_,
"[ExecExprVisitor]Size of results not equal row count");
@@ -3025,8 +3019,8 @@ ExecExprVisitor::visit(ExistsExpr& expr) {
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type {}",
expr.column_.data_type));
"unsupported data type {}",
expr.column_.data_type);
}
AssertInfo(res.size() == row_count_,
"[ExecExprVisitor]Size of results not equal row count");
@@ -3223,8 +3217,8 @@ ExecExprVisitor::ExecJsonContainsWithDiffType(JsonContainsExpr& expr_raw)
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type {}",
element.val_case()));
"unsupported data type {}",
element.val_case());
}
}
}
@@ -3449,8 +3443,8 @@ ExecExprVisitor::ExecJsonContainsAllWithDiffType(JsonContainsExpr& expr_raw)
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type {}",
element.val_case()));
"unsupported data type {}",
element.val_case());
}
if (tmp_elements_index.size() == 0) {
return true;
@@ -3502,8 +3496,8 @@ ExecExprVisitor::visit(JsonContainsExpr& expr) {
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type {}",
expr.val_case_));
"unsupported data type {}",
expr.val_case_);
}
} else {
if (expr.same_type_) {
@@ -3530,8 +3524,8 @@ ExecExprVisitor::visit(JsonContainsExpr& expr) {
}
default:
PanicInfo(Unsupported,
fmt::format("unsupported value type {}",
expr.val_case_));
"unsupported value type {}",
expr.val_case_);
}
} else {
res = ExecJsonContainsWithDiffType(expr);
@@ -3560,8 +3554,8 @@ ExecExprVisitor::visit(JsonContainsExpr& expr) {
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported data type {}",
expr.val_case_));
"unsupported data type {}",
expr.val_case_);
}
} else {
if (expr.same_type_) {
@@ -3601,8 +3595,8 @@ ExecExprVisitor::visit(JsonContainsExpr& expr) {
}
default:
PanicInfo(DataTypeInvalid,
fmt::format("unsupported json contains type {}",
expr.val_case_));
"unsupported json contains type {}",
expr.val_case_);
}
AssertInfo(res.size() == row_count_,
"[ExecExprVisitor]Size of results not equal row count");