enhance: rename PanicInfo to ThrowInfo (#43384)

issue: #41435

This prevents AI tools from mistaking our exception-throwing macro for a
dangerous PANIC operation that terminates the program; the macro only throws a recoverable C++ exception.

Signed-off-by: Buqian Zheng <zhengbuqian@gmail.com>
Buqian Zheng 2025-07-19 20:22:52 +08:00 committed by GitHub
parent 5dc20aaa43
commit 389104d200
GPG Key ID: B5690EEEBB952194
122 changed files with 507 additions and 507 deletions
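
For context, a minimal, self-contained C++ sketch of the throwing-macro pattern this commit renames. The ErrorCode values, the SegcoreError type, and the macro body below are simplified assumptions for illustration only, not the actual Milvus definitions; only the call-site shape mirrors the diff.

#include <iostream>
#include <stdexcept>
#include <string>

// Hypothetical stand-ins for the real Milvus error code and exception types.
enum class ErrorCode { UnexpectedError, Unsupported };

struct SegcoreError : std::runtime_error {
    ErrorCode code;
    SegcoreError(ErrorCode c, const std::string& msg)
        : std::runtime_error(msg), code(c) {
    }
};

// ThrowInfo raises a catchable exception rather than aborting the process,
// which is why the old name PanicInfo was misleading.
#define ThrowInfo(errcode, info) throw SegcoreError((errcode), (info))

int
cache_slot_count(int storage_type) {
    switch (storage_type) {
        case 0:
            return 8;
        default:
            ThrowInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
    }
}

int
main() {
    try {
        cache_slot_count(42);
    } catch (const SegcoreError& e) {
        // The caller recovers and the program keeps running.
        std::cout << "caught: " << e.what() << std::endl;
    }
    return 0;
}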

View File

@ -295,7 +295,7 @@ cache_slot_count(StorageType storage_type) {
case StorageType::MIXED:
return monitor::internal_cache_slot_count_mixed;
default:
PanicInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
ThrowInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
}
}
@ -309,7 +309,7 @@ cache_cell_count(StorageType storage_type) {
case StorageType::MIXED:
return monitor::internal_cache_cell_count_mixed;
default:
PanicInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
ThrowInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
}
}
@ -323,7 +323,7 @@ cache_cell_loaded_count(StorageType storage_type) {
case StorageType::MIXED:
return monitor::internal_cache_cell_loaded_count_mixed;
default:
PanicInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
ThrowInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
}
}
@ -337,7 +337,7 @@ cache_load_latency(StorageType storage_type) {
case StorageType::MIXED:
return monitor::internal_cache_load_latency_mixed;
default:
PanicInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
ThrowInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
}
}
@ -351,7 +351,7 @@ cache_op_result_count_hit(StorageType storage_type) {
case StorageType::MIXED:
return monitor::internal_cache_op_result_count_hit_mixed;
default:
PanicInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
ThrowInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
}
}
@ -365,7 +365,7 @@ cache_op_result_count_miss(StorageType storage_type) {
case StorageType::MIXED:
return monitor::internal_cache_op_result_count_miss_mixed;
default:
PanicInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
ThrowInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
}
}
@ -379,7 +379,7 @@ cache_cell_eviction_count(StorageType storage_type) {
case StorageType::MIXED:
return monitor::internal_cache_cell_eviction_count_mixed;
default:
PanicInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
ThrowInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
}
}
@ -398,7 +398,7 @@ cache_item_lifetime_seconds(StorageType storage_type) {
case StorageType::MIXED:
return monitor::internal_cache_item_lifetime_seconds_mixed;
default:
PanicInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
ThrowInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
}
}
@ -412,7 +412,7 @@ cache_load_count_success(StorageType storage_type) {
case StorageType::MIXED:
return monitor::internal_cache_load_count_success_mixed;
default:
PanicInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
ThrowInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
}
}
@ -426,7 +426,7 @@ cache_load_count_fail(StorageType storage_type) {
case StorageType::MIXED:
return monitor::internal_cache_load_count_fail_mixed;
default:
PanicInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
ThrowInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
}
}
@ -440,7 +440,7 @@ cache_memory_overhead_bytes(StorageType storage_type) {
case StorageType::MIXED:
return monitor::internal_cache_memory_overhead_bytes_mixed;
default:
PanicInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
ThrowInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
}
}

View File

@ -281,7 +281,7 @@ DList::tryEvict(const ResourceUsage& expected_eviction,
size_to_evict.file_bytes);
break;
default:
PanicInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
ThrowInfo(ErrorCode::UnexpectedError, "Unknown StorageType");
}
return size_to_evict;
}

View File

@ -308,7 +308,7 @@ KmeansClustering::StreamingAssignandUpload(
dataset->SetIsOwner(true);
auto res = cluster_node.Assign(*dataset);
if (!res.has_value()) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
fmt::format("failed to kmeans assign: {}: {}",
KnowhereStatusString(res.error()),
res.what()));
@ -438,7 +438,7 @@ KmeansClustering::Run(const milvus::proto::clustering::AnalyzeInfo& config) {
// return id mapping
auto res = cluster_node.Train(*dataset, train_conf);
if (!res.has_value()) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
fmt::format("failed to kmeans train: {}: {}",
KnowhereStatusString(res.error()),
res.what()));
@ -452,7 +452,7 @@ KmeansClustering::Run(const milvus::proto::clustering::AnalyzeInfo& config) {
auto centroids_res = cluster_node.GetCentroids();
if (!centroids_res.has_value()) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
fmt::format("failed to get centroids: {}: {}",
KnowhereStatusString(res.error()),
res.what()));

View File

@ -244,7 +244,7 @@ class Array {
return true;
}
default:
PanicInfo(Unsupported, "unsupported element type for array");
ThrowInfo(Unsupported, "unsupported element type for array");
}
}
@ -282,7 +282,7 @@ class Array {
return static_cast<T>(
reinterpret_cast<double*>(data_.get())[index]);
default:
PanicInfo(Unsupported,
ThrowInfo(Unsupported,
"unsupported element type for array");
}
}
@ -524,7 +524,7 @@ class ArrayView {
return static_cast<T>(
reinterpret_cast<double*>(data_)[index]);
default:
PanicInfo(Unsupported,
ThrowInfo(Unsupported,
"unsupported element type for array");
}
}

View File

@ -169,7 +169,7 @@ class StringChunk : public Chunk {
std::string_view
operator[](const int i) const {
if (i < 0 || i >= row_nums_) {
PanicInfo(ErrorCode::OutOfRange,
ThrowInfo(ErrorCode::OutOfRange,
"index out of range {} at {}",
i,
row_nums_);
@ -327,7 +327,7 @@ class ArrayChunk : public Chunk {
const char*
ValueAt(int64_t idx) const override {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"ArrayChunk::ValueAt is not supported");
}
@ -387,7 +387,7 @@ class VectorArrayChunk : public Chunk {
const char*
ValueAt(int64_t idx) const override {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"VectorArrayChunk::ValueAt is not supported");
}

View File

@ -446,7 +446,7 @@ create_chunk_writer(const FieldMeta& field_meta, Args&&... args) {
field_meta.get_element_type(),
std::forward<Args>(args)...);
default:
PanicInfo(Unsupported, "Unsupported data type");
ThrowInfo(Unsupported, "Unsupported data type");
}
}

View File

@ -150,7 +150,7 @@ FailureCStatus(const std::exception* ex) {
#define Assert(expr) AssertInfo((expr), "")
#define PanicInfo(errcode, info, args...) \
#define ThrowInfo(errcode, info, args...) \
do { \
milvus::impl::EasyAssertInfo(false, \
"", \

View File

@ -303,7 +303,7 @@ FieldDataImpl<Type, is_type_entire_row>::FillFieldData(
for (size_t index = 0; index < element_count; ++index) {
VectorFieldProto field_data;
if (array_array->GetString(index) == "") {
PanicInfo(DataTypeInvalid, "empty vector array");
ThrowInfo(DataTypeInvalid, "empty vector array");
}
auto success =
field_data.ParseFromString(array_array->GetString(index));
@ -313,7 +313,7 @@ FieldDataImpl<Type, is_type_entire_row>::FillFieldData(
return FillFieldData(values.data(), element_count);
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
GetName() + "::FillFieldData" +
" not support data type " +
GetDataTypeName(data_type_));
@ -434,7 +434,7 @@ FieldDataImpl<Type, is_type_entire_row>::FillFieldData(
values.data(), valid_data_ptr.get(), element_count, 0);
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
GetName() + "::FillFieldData" +
" not support data type " +
GetDataTypeName(data_type_));
@ -493,7 +493,7 @@ InitScalarFieldData(const DataType& type, bool nullable, int64_t cap_rows) {
case DataType::JSON:
return std::make_shared<FieldData<Json>>(type, nullable, cap_rows);
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"InitScalarFieldData not support data type " +
GetDataTypeName(type));
}

View File

@ -89,7 +89,7 @@ class FieldData<VectorArray> : public FieldDataVectorArrayImpl {
int64_t
get_dim() const override {
PanicInfo(Unsupported,
ThrowInfo(Unsupported,
"Call get_dim on FieldData<VectorArray> is not supported");
}
};

View File

@ -158,7 +158,7 @@ class FieldBitsetImpl : public FieldDataBase {
// no need to implement for bitset which used in runtime process.
void
FillFieldData(const void* source, ssize_t element_count) override {
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"FillFieldData(const void* source, ssize_t element_count)"
"not implemented for bitset");
}
@ -168,7 +168,7 @@ class FieldBitsetImpl : public FieldDataBase {
const uint8_t* valid_data,
ssize_t element_count,
ssize_t offset) override {
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"FillFieldData(const void* field_data, "
"const uint8_t* valid_data, ssize_t element_count)"
"not implemented for bitset");
@ -176,14 +176,14 @@ class FieldBitsetImpl : public FieldDataBase {
void
FillFieldData(const std::shared_ptr<arrow::Array> array) override {
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"FillFieldData(const std::shared_ptr<arrow::Array>& array) "
"not implemented for bitset");
}
void
FillFieldData(const std::shared_ptr<arrow::ChunkedArray> arrays) override {
PanicInfo(
ThrowInfo(
NotImplemented,
"FillFieldData(const std::shared_ptr<arrow::ChunkedArray>& arrays) "
"not implemented for bitset");
@ -192,7 +192,7 @@ class FieldBitsetImpl : public FieldDataBase {
void
FillFieldData(const std::optional<DefaultValueType> default_value,
ssize_t element_count) override {
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"FillFieldData(const const std::optional<DefaultValueType> "
"default_value, "
"ssize_t element_count) not implemented for bitset");
@ -200,14 +200,14 @@ class FieldBitsetImpl : public FieldDataBase {
virtual void
FillFieldData(const std::shared_ptr<arrow::StringArray>& array) {
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"FillFieldData(const std::shared_ptr<arrow::StringArray>& "
"array) not implemented for bitset");
}
virtual void
FillFieldData(const std::shared_ptr<arrow::BinaryArray>& array) {
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"FillFieldData(const std::shared_ptr<arrow::BinaryArray>& "
"array) not implemented for bitset");
}
@ -224,12 +224,12 @@ class FieldBitsetImpl : public FieldDataBase {
uint8_t*
ValidData() override {
PanicInfo(NotImplemented, "ValidData() not implemented for bitset");
ThrowInfo(NotImplemented, "ValidData() not implemented for bitset");
}
const void*
RawValue(ssize_t offset) const override {
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"RawValue(ssize_t offset) not implemented for bitset");
}
@ -302,13 +302,13 @@ class FieldBitsetImpl : public FieldDataBase {
int64_t
get_null_count() const override {
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"get_null_count() not implemented for bitset");
}
bool
is_valid(ssize_t offset) const override {
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"is_valid(ssize_t offset) not implemented for bitset");
}
@ -344,7 +344,7 @@ class FieldDataImpl : public FieldDataBase {
data_.resize(num_rows_ * dim_);
if (nullable) {
if (IsVectorDataType(data_type)) {
PanicInfo(NotImplemented, "vector type not support null");
ThrowInfo(NotImplemented, "vector type not support null");
}
valid_data_.resize((num_rows_ + 7) / 8, 0xFF);
}
@ -396,14 +396,14 @@ class FieldDataImpl : public FieldDataBase {
virtual void
FillFieldData(const std::shared_ptr<arrow::StringArray>& array) {
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"FillFieldData(const std::shared_ptr<arrow::StringArray>& "
"array) not implemented by default");
}
virtual void
FillFieldData(const std::shared_ptr<arrow::BinaryArray>& array) {
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"FillFieldData(const std::shared_ptr<arrow::BinaryArray>& "
"array) not implemented by default");
}

View File

@ -58,7 +58,7 @@ FieldMeta::enable_analyzer() const {
TokenizerParams
FieldMeta::get_analyzer_params() const {
if (!enable_analyzer()) {
PanicInfo(
ThrowInfo(
Unsupported,
fmt::format("unsupported text index when not enable analyzer"));
}

View File

@ -23,7 +23,7 @@
namespace milvus {
#define THROW_FILE_WRITE_ERROR(path) \
PanicInfo(ErrorCode::FileWriteFailed, \
ThrowInfo(ErrorCode::FileWriteFailed, \
fmt::format("write data to file {} failed, error code {}", \
path, \
strerror(errno)));

View File

@ -48,7 +48,7 @@ class GroupChunk {
void
AddChunk(FieldId field_id, std::shared_ptr<Chunk> chunk) {
if (chunks_.find(field_id) != chunks_.end()) {
PanicInfo(ErrorCode::FieldAlreadyExist,
ThrowInfo(ErrorCode::FieldAlreadyExist,
"Field {} already exists in GroupChunk",
field_id.get());
}

View File

@ -48,7 +48,7 @@ ExtractSubJson(const std::string& json, const std::vector<std::string>& keys) {
rapidjson::Document doc;
doc.Parse(json.c_str());
if (doc.HasParseError()) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"json parse failed, error:{}",
rapidjson::GetParseError_En(doc.GetParseError()));
}

View File

@ -30,7 +30,7 @@ class JsonCastFunction {
template <typename T, typename F>
std::optional<T>
cast(const F& t) const {
PanicInfo(Unsupported, "Not implemented");
ThrowInfo(Unsupported, "Not implemented");
}
template <typename T>

View File

@ -33,7 +33,7 @@ JsonCastType
JsonCastType::FromString(const std::string& str) {
auto it = json_cast_type_map_.find(str);
if (it == json_cast_type_map_.end()) {
PanicInfo(Unsupported, "Invalid json cast type: " + str);
ThrowInfo(Unsupported, "Invalid json cast type: " + str);
}
return it->second;
}
@ -62,7 +62,7 @@ JsonCastType::ToTantivyType() const {
case JsonCastType::DataType::VARCHAR:
return TantivyDataType::Keyword;
default:
PanicInfo(DataTypeInvalid, "Invalid data type:{}", element_type());
ThrowInfo(DataTypeInvalid, "Invalid data type:{}", element_type());
}
}
@ -76,7 +76,7 @@ JsonCastType::ToMilvusDataType() const {
case JsonCastType::DataType::VARCHAR:
return MilvusDataType::VARCHAR;
default:
PanicInfo(DataTypeInvalid, "Invalid data type:{}", element_type());
ThrowInfo(DataTypeInvalid, "Invalid data type:{}", element_type());
}
}

View File

@ -29,7 +29,7 @@ struct PatternMatchTranslator {
template <typename T>
inline std::string
operator()(const T& pattern) {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
"pattern matching is only supported on string type");
}
};

View File

@ -143,7 +143,7 @@ GetDataTypeSize(DataType data_type, int dim = 1) {
// them. Caller of this method must handle this case themselves and must
// not pass variable length types to this method.
default: {
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("failed to get data type size, invalid type {}",
data_type));
@ -188,7 +188,7 @@ GetArrowDataType(DataType data_type, int dim = 1) {
case DataType::VECTOR_INT8:
return arrow::fixed_size_binary(dim);
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("failed to get data type, invalid type {}",
data_type));
}
@ -250,7 +250,7 @@ GetDataTypeName(DataType data_type) {
case DataType::VECTOR_ARRAY:
return "vector_array";
default:
PanicInfo(DataTypeInvalid, "Unsupported DataType({})", data_type);
ThrowInfo(DataTypeInvalid, "Unsupported DataType({})", data_type);
}
}

View File

@ -50,7 +50,7 @@ class VectorArray : public milvus::VectorTrait {
}
default: {
// TODO(SpadeA): add other vector types
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"Not implemented vector type: {}",
static_cast<int>(vector_field.data_case()));
}
@ -114,7 +114,7 @@ class VectorArray : public milvus::VectorTrait {
}
default: {
// TODO(SpadeA): add other vector types
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"Not implemented vector type: {}",
static_cast<int>(element_type_));
}
@ -137,7 +137,7 @@ class VectorArray : public milvus::VectorTrait {
}
default: {
// TODO(SpadeA): add other vector types
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"Not implemented vector type: {}",
static_cast<int>(element_type_));
}
@ -157,7 +157,7 @@ class VectorArray : public milvus::VectorTrait {
}
default: {
// TODO(SpadeA): add other vector types
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"Not implemented vector type: {}",
static_cast<int>(element_type_));
}
@ -217,7 +217,7 @@ class VectorArray : public milvus::VectorTrait {
}
default: {
// TODO(SpadeA): add other vector types
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"Not implemented vector type: {}",
static_cast<int>(element_type_));
}
@ -277,7 +277,7 @@ class VectorArrayView {
}
default: {
// TODO(SpadeA): add other vector types.
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"Not implemented vector type: {}",
static_cast<int>(element_type_));
}
@ -297,7 +297,7 @@ class VectorArrayView {
}
default: {
// TODO(SpadeA): add other vector types
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"Not implemented vector type: {}",
static_cast<int>(element_type_));
}
@ -332,7 +332,7 @@ class VectorArrayView {
}
default: {
// TODO(SpadeA): add other vector types
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"Not implemented vector type: {}",
static_cast<int>(element_type_));
}

View File

@ -63,13 +63,13 @@ KnowhereSetSimdType(const char* value) {
} else if (strcmp(value, "avx") == 0 || strcmp(value, "sse4_2") == 0) {
simd_type = knowhere::KnowhereConfig::SimdType::SSE4_2;
} else {
PanicInfo(ConfigInvalid, "invalid SIMD type: " + std::string(value));
ThrowInfo(ConfigInvalid, "invalid SIMD type: " + std::string(value));
}
try {
return knowhere::KnowhereConfig::SetSimdType(simd_type);
} catch (std::exception& e) {
LOG_ERROR(e.what());
PanicInfo(ConfigInvalid, e.what());
ThrowInfo(ConfigInvalid, e.what());
}
}
@ -87,7 +87,7 @@ void
KnowhereInitSearchThreadPool(const uint32_t num_threads) {
knowhere::KnowhereConfig::SetSearchThreadPoolSize(num_threads);
if (!knowhere::KnowhereConfig::SetAioContextPool(num_threads)) {
PanicInfo(ConfigInvalid,
ThrowInfo(ConfigInvalid,
"Failed to set aio context pool with num_threads " +
std::to_string(num_threads));
}
@ -99,7 +99,7 @@ KnowhereInitGPUMemoryPool(const uint32_t init_size, const uint32_t max_size) {
knowhere::KnowhereConfig::SetRaftMemPool();
return;
} else if (init_size > max_size) {
PanicInfo(ConfigInvalid,
ThrowInfo(ConfigInvalid,
"Error Gpu memory pool params: init_size {} can't not large "
"than max_size {}.",
init_size,

View File

@ -66,7 +66,7 @@ class BaseConfig {
virtual const std::unordered_map<std::string, std::string>&
values() const {
PanicInfo(NotImplemented, "method values() is not supported");
ThrowInfo(NotImplemented, "method values() is not supported");
}
virtual ~BaseConfig() = default;

View File

@ -68,7 +68,7 @@ PhyBinaryArithOpEvalRangeExpr::Eval(EvalCtx& context, VectorPtr& result) {
break;
}
default: {
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("unsupported value type {} in expression",
value_type));
@ -90,7 +90,7 @@ PhyBinaryArithOpEvalRangeExpr::Eval(EvalCtx& context, VectorPtr& result) {
break;
}
default: {
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("unsupported value type {} in expression",
value_type));
@ -99,7 +99,7 @@ PhyBinaryArithOpEvalRangeExpr::Eval(EvalCtx& context, VectorPtr& result) {
break;
}
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"unsupported data type: {}",
expr_->column_.data_type_);
}
@ -259,7 +259,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForJson(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -300,7 +300,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForJson(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -341,7 +341,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForJson(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -382,7 +382,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForJson(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -423,7 +423,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForJson(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -464,7 +464,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForJson(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -473,7 +473,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForJson(
break;
}
default:
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
"unsupported operator type for binary "
"arithmetic eval expr: {}",
op_type);
@ -627,7 +627,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForArray(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -668,7 +668,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForArray(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -709,7 +709,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForArray(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -750,7 +750,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForArray(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -791,7 +791,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForArray(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -832,7 +832,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForArray(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -841,7 +841,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForArray(
break;
}
default:
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
"unsupported operator type for binary "
"arithmetic eval expr: {}",
op_type);
@ -990,7 +990,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForIndex(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -1066,7 +1066,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForIndex(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -1142,7 +1142,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForIndex(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -1218,7 +1218,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForIndex(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -1294,7 +1294,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForIndex(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -1370,7 +1370,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForIndex(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -1379,7 +1379,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForIndex(
break;
}
default:
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
"unsupported operator type for binary "
"arithmetic eval expr: {}",
op_type);
@ -1500,7 +1500,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForData(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -1556,7 +1556,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForData(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -1612,7 +1612,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForData(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -1668,7 +1668,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForData(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -1724,7 +1724,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForData(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -1780,7 +1780,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForData(
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported arith type for binary "
"arithmetic eval expr: {}",
@ -1789,7 +1789,7 @@ PhyBinaryArithOpEvalRangeExpr::ExecRangeVisitorImplForData(
break;
}
default:
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
"unsupported operator type for binary "
"arithmetic eval expr: {}",
op_type);

View File

@ -136,7 +136,7 @@ struct ArithOpElementFunc {
res[i] =
(long(src[offset]) % long(right_operand)) == val;
} else {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
fmt::format("unsupported arith type:{} for "
"ArithOpElementFunc",
arith_op));
@ -158,7 +158,7 @@ struct ArithOpElementFunc {
res[i] =
(long(src[offset]) % long(right_operand)) != val;
} else {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
fmt::format("unsupported arith type:{} for "
"ArithOpElementFunc",
arith_op));
@ -181,7 +181,7 @@ struct ArithOpElementFunc {
res[i] =
(long(src[offset]) % long(right_operand)) > val;
} else {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
fmt::format("unsupported arith type:{} for "
"ArithOpElementFunc",
arith_op));
@ -204,7 +204,7 @@ struct ArithOpElementFunc {
res[i] =
(long(src[offset]) % long(right_operand)) >= val;
} else {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
fmt::format("unsupported arith type:{} for "
"ArithOpElementFunc",
arith_op));
@ -226,7 +226,7 @@ struct ArithOpElementFunc {
res[i] =
(long(src[offset]) % long(right_operand)) < val;
} else {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
fmt::format("unsupported arith type:{} for "
"ArithOpElementFunc",
arith_op));
@ -248,7 +248,7 @@ struct ArithOpElementFunc {
res[i] =
(long(src[offset]) % long(right_operand)) <= val;
} else {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
fmt::format("unsupported arith type:{} for "
"ArithOpElementFunc",
arith_op));
@ -269,14 +269,14 @@ struct ArithOpElementFunc {
res.inplace_arith_compare<T, arith_op_cvt, cmp_op_cvt>(
src, right_operand, val, size);
} else {
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format(
"unsupported arith type:{} for ArithOpElementFunc",
arith_op));
}
} else {
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported cmp type:{} for ArithOpElementFunc",
cmp_op));
@ -328,7 +328,7 @@ struct ArithOpIndexFunc {
proto::plan::ArithOpType::Mod) {
res[i] = (long(raw.value()) % long(right_operand)) == val;
} else {
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format(
"unsupported arith type:{} for ArithOpElementFunc",
@ -350,7 +350,7 @@ struct ArithOpIndexFunc {
proto::plan::ArithOpType::Mod) {
res[i] = (long(raw.value()) % long(right_operand)) != val;
} else {
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format(
"unsupported arith type:{} for ArithOpElementFunc",
@ -372,7 +372,7 @@ struct ArithOpIndexFunc {
proto::plan::ArithOpType::Mod) {
res[i] = (long(raw.value()) % long(right_operand)) > val;
} else {
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format(
"unsupported arith type:{} for ArithOpElementFunc",
@ -394,7 +394,7 @@ struct ArithOpIndexFunc {
proto::plan::ArithOpType::Mod) {
res[i] = (long(raw.value()) % long(right_operand)) >= val;
} else {
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format(
"unsupported arith type:{} for ArithOpElementFunc",
@ -416,7 +416,7 @@ struct ArithOpIndexFunc {
proto::plan::ArithOpType::Mod) {
res[i] = (long(raw.value()) % long(right_operand)) < val;
} else {
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format(
"unsupported arith type:{} for ArithOpElementFunc",
@ -438,7 +438,7 @@ struct ArithOpIndexFunc {
proto::plan::ArithOpType::Mod) {
res[i] = (long(raw.value()) % long(right_operand)) <= val;
} else {
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format(
"unsupported arith type:{} for ArithOpElementFunc",

View File

@ -95,7 +95,7 @@ PhyBinaryRangeFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format(
"unsupported value type {} in expression",
value_type));
@ -117,7 +117,7 @@ PhyBinaryRangeFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format(
"unsupported value type {} in expression",
value_type));
@ -145,7 +145,7 @@ PhyBinaryRangeFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
break;
}
default: {
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("unsupported value type {} in expression",
value_type));
@ -154,7 +154,7 @@ PhyBinaryRangeFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
break;
}
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"unsupported data type: {}",
expr_->column_.data_type_);
}

View File

@ -59,7 +59,7 @@ PhyColumnExpr::Eval(EvalCtx& context, VectorPtr& result) {
break;
}
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"unsupported data type: {}",
this->expr_->type());
}

View File

@ -250,7 +250,7 @@ PhyCompareFilterExpr::ExecCompareExprDispatcherForHybridSegment(
// case OpType::PostfixMatch: {
// }
default: {
PanicInfo(OpTypeInvalid, "unsupported optype: {}", expr_->op_type_);
ThrowInfo(OpTypeInvalid, "unsupported optype: {}", expr_->op_type_);
}
}
}
@ -274,7 +274,7 @@ PhyCompareFilterExpr::ExecCompareExprDispatcherForBothDataSegment(
case DataType::DOUBLE:
return ExecCompareLeftType<double>(context);
default:
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("unsupported left datatype:{} of compare expr",
expr_->left_data_type_));
@ -300,7 +300,7 @@ PhyCompareFilterExpr::ExecCompareLeftType(EvalCtx& context) {
case DataType::DOUBLE:
return ExecCompareRightType<T, double>(context);
default:
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("unsupported right datatype:{} of compare expr",
expr_->right_data_type_));
@ -407,7 +407,7 @@ PhyCompareFilterExpr::ExecCompareRightType(EvalCtx& context) {
break;
}
default:
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
fmt::format("unsupported operator type for "
"compare column expr: {}",
expr_type));

View File

@ -61,7 +61,7 @@ struct CompareElementFunc {
} else if constexpr (op == proto::plan::OpType::LessEqual) {
res[i] = left[offset] <= right[offset];
} else {
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format(
"unsupported op_type:{} for CompareElementFunc",
@ -89,7 +89,7 @@ struct CompareElementFunc {
} else if constexpr (op == proto::plan::OpType::LessEqual) {
res[i] = left[i] <= right[i];
} else {
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format(
"unsupported op_type:{} for CompareElementFunc",
@ -118,7 +118,7 @@ struct CompareElementFunc {
res.inplace_compare_column<T, U, milvus::bitset::CompareOpType::LE>(
left, right, size);
} else {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
fmt::format(
"unsupported op_type:{} for CompareElementFunc", op));
}

View File

@ -80,7 +80,7 @@ class SingleElement : public BaseElement {
try {
return std::get<T>(value_);
} catch (const std::bad_variant_access& e) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"SingleElement GetValue() failed: {}",
e.what());
}

View File

@ -39,7 +39,7 @@ PhyExistsFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
break;
}
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"unsupported data type: {}",
expr_->column_.data_type_);
}
@ -91,7 +91,7 @@ PhyExistsFilterExpr::EvalJsonExistsForIndex() {
}
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"unsupported data type: {}",
index->GetCastType());
}

View File

@ -307,7 +307,7 @@ CompileExpression(const expr::TypedExprPtr& expr,
context->query_config()->get_expr_batch_size(),
context->get_consistency_level());
} else {
PanicInfo(ExprInvalid, "unsupport expr: ", expr->ToString());
ThrowInfo(ExprInvalid, "unsupport expr: ", expr->ToString());
}
return result;
}

View File

@ -95,7 +95,7 @@ class Expr {
virtual std::string
ToString() const {
PanicInfo(ErrorCode::NotImplemented, "not implemented");
ThrowInfo(ErrorCode::NotImplemented, "not implemented");
}
virtual bool
@ -105,7 +105,7 @@ class Expr {
virtual std::optional<milvus::expr::ColumnInfo>
GetColumnInfo() const {
PanicInfo(ErrorCode::NotImplemented, "not implemented");
ThrowInfo(ErrorCode::NotImplemented, "not implemented");
}
std::vector<std::shared_ptr<Expr>>&
@ -836,7 +836,7 @@ class SegmentExpr : public Expr {
return ProcessAllChunksForMultipleChunk<T>(
func, skip_func, res, valid_res, values...);
} else {
PanicInfo(ErrorCode::Unsupported, "unreachable");
ThrowInfo(ErrorCode::Unsupported, "unreachable");
}
}
@ -974,7 +974,7 @@ class SegmentExpr : public Expr {
return ProcessIndexChunksForValid<std::string>();
}
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"unsupported element type: {}",
element_type);
}
@ -1037,7 +1037,7 @@ class SegmentExpr : public Expr {
use_index, input);
}
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"unsupported element type: {}",
element_type);
}

View File

@ -71,7 +71,7 @@ PhyJsonContainsFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
break;
}
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"unsupported data type: {}",
expr_->column_.data_type_);
}
@ -99,7 +99,7 @@ PhyJsonContainsFilterExpr::EvalJsonContainsForDataSegment(EvalCtx& context) {
return ExecArrayContains<std::string>(context);
}
default:
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("unsupported data type {}", val_type));
}
@ -123,7 +123,7 @@ PhyJsonContainsFilterExpr::EvalJsonContainsForDataSegment(EvalCtx& context) {
return ExecJsonContainsArray(context);
}
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"unsupported data type:{}",
val_type);
}
@ -149,7 +149,7 @@ PhyJsonContainsFilterExpr::EvalJsonContainsForDataSegment(EvalCtx& context) {
return ExecArrayContainsAll<std::string>(context);
}
default:
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("unsupported data type {}", val_type));
}
@ -173,7 +173,7 @@ PhyJsonContainsFilterExpr::EvalJsonContainsForDataSegment(EvalCtx& context) {
return ExecJsonContainsAllArray(context);
}
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"unsupported data type:{}",
val_type);
}
@ -183,7 +183,7 @@ PhyJsonContainsFilterExpr::EvalJsonContainsForDataSegment(EvalCtx& context) {
}
}
default:
PanicInfo(ExprInvalid,
ThrowInfo(ExprInvalid,
"unsupported json contains type {}",
proto::plan::JSONContainsExpr_JSONOp_Name(expr_->op_));
}
@ -1118,7 +1118,7 @@ PhyJsonContainsFilterExpr::ExecJsonContainsAllWithDiffType(EvalCtx& context) {
break;
}
default:
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("unsupported data type {}",
element.val_case()));
@ -1302,7 +1302,7 @@ PhyJsonContainsFilterExpr::ExecJsonContainsAllWithDiffTypeByKeyIndex() {
break;
}
default:
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("unsupported data type {}",
element.val_case()));
@ -1676,7 +1676,7 @@ PhyJsonContainsFilterExpr::ExecJsonContainsWithDiffType(EvalCtx& context) {
break;
}
default:
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("unsupported data type {}",
element.val_case()));
@ -1844,7 +1844,7 @@ PhyJsonContainsFilterExpr::ExecJsonContainsWithDiffTypeByKeyIndex() {
break;
}
default:
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("unsupported data type {}",
element.val_case()));
@ -1914,7 +1914,7 @@ PhyJsonContainsFilterExpr::EvalArrayContainsForIndexSegment(
return ExecArrayContainsForIndexSegmentImpl<std::string>();
}
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported data type for "
"ExecArrayContainsForIndexSegmentImpl: {}",
expr_->column_.element_type_));
@ -1957,7 +1957,7 @@ PhyJsonContainsFilterExpr::ExecArrayContainsForIndexSegmentImpl() {
return result;
}
default:
PanicInfo(
ThrowInfo(
ExprInvalid,
"unsupported array contains type {}",
proto::plan::JSONContainsExpr_JSONOp_Name(expr_->op_));

View File

@ -41,7 +41,7 @@ PhyLogicalBinaryExpr::Eval(EvalCtx& context, VectorPtr& result) {
LogicalElementFunc<LogicalOpType::Or> func;
func(lview, rview, size);
} else {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
"unsupported logical operator: {}",
expr_->GetOpTypeString());
}

View File

@ -41,7 +41,7 @@ struct LogicalElementFunc {
} else if constexpr (op == LogicalOpType::Or) {
left[i] |= right[i];
} else {
PanicInfo(
ThrowInfo(
OpTypeInvalid, "unsupported logical operator: {}", op);
}
}
@ -56,7 +56,7 @@ struct LogicalElementFunc {
} else if constexpr (op == LogicalOpType::Minus) {
left.inplace_sub(right, n);
} else {
PanicInfo(OpTypeInvalid, "unsupported logical operator: {}", op);
ThrowInfo(OpTypeInvalid, "unsupported logical operator: {}", op);
}
}
};

View File

@ -76,7 +76,7 @@ PhyNullExpr::Eval(EvalCtx& context, VectorPtr& result) {
break;
}
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"unsupported data type: {}",
expr_->column_.data_type_);
}
@ -134,7 +134,7 @@ PhyNullExpr::PreCheckNullable(OffsetVector* input) {
break;
}
default:
PanicInfo(ExprInvalid,
ThrowInfo(ExprInvalid,
"unsupported null expr type {}",
proto::plan::NullExpr_NullOp_Name(expr_->op_));
}

View File

@ -90,7 +90,7 @@ PhyTermFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
result = ExecVisitorImplTemplateJson<std::string>(context);
break;
default:
PanicInfo(DataTypeInvalid, "unknown data type: {}", type);
ThrowInfo(DataTypeInvalid, "unknown data type: {}", type);
}
break;
}
@ -119,12 +119,12 @@ PhyTermFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
result = ExecVisitorImplTemplateArray<std::string>(context);
break;
default:
PanicInfo(DataTypeInvalid, "unknown data type: {}", type);
ThrowInfo(DataTypeInvalid, "unknown data type: {}", type);
}
break;
}
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"unsupported data type: {}",
expr_->column_.data_type_);
}
@ -187,7 +187,7 @@ PhyTermFilterExpr::InitPkCacheOffset() {
break;
}
default: {
PanicInfo(DataTypeInvalid, "unsupported data type {}", pk_type_);
ThrowInfo(DataTypeInvalid, "unsupported data type {}", pk_type_);
}
}

View File

@ -81,7 +81,7 @@ PhyUnaryRangeFilterExpr::CanUseIndexForArray<milvus::Array>() {
res = CanUseIndexForArray<std::string_view>();
break;
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"unsupported element type when execute array "
"equal for index: {}",
expr_->column_.element_type_);
@ -140,7 +140,7 @@ PhyUnaryRangeFilterExpr::ExecRangeVisitorImplArrayForIndex<proto::plan::Array>(
}
}
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"unsupported element type when execute array "
"equal for index: {}",
expr_->column_.element_type_);
@ -220,7 +220,7 @@ PhyUnaryRangeFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
result = ExecRangeVisitorImplForIndex<std::string>();
break;
default:
PanicInfo(
ThrowInfo(
DataTypeInvalid, "unknown data type: {}", val_type);
}
} else {
@ -242,7 +242,7 @@ PhyUnaryRangeFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
context);
break;
default:
PanicInfo(
ThrowInfo(
DataTypeInvalid, "unknown data type: {}", val_type);
}
}
@ -278,13 +278,13 @@ PhyUnaryRangeFilterExpr::Eval(EvalCtx& context, VectorPtr& result) {
}
break;
default:
PanicInfo(
ThrowInfo(
DataTypeInvalid, "unknown data type: {}", val_type);
}
break;
}
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"unsupported data type: {}",
expr_->column_.data_type_);
}
@ -501,7 +501,7 @@ PhyUnaryRangeFilterExpr::ExecRangeVisitorImplArray(EvalCtx& context) {
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format(
"unsupported operator type for unary expr: {}",
@ -918,7 +918,7 @@ PhyUnaryRangeFilterExpr::ExecRangeVisitorImplJson(EvalCtx& context) {
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported operator type for unary expr: {}",
op_type));
@ -1479,7 +1479,7 @@ PhyUnaryRangeFilterExpr::ExecRangeVisitorImpl(EvalCtx& context) {
if (expr_->op_type_ == proto::plan::OpType::TextMatch ||
expr_->op_type_ == proto::plan::OpType::PhraseMatch) {
if (has_offset_input_) {
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("match query does not support iterative filter"));
}
@ -1574,7 +1574,7 @@ PhyUnaryRangeFilterExpr::ExecRangeVisitorImplForIndex() {
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported operator type for unary expr: {}",
op_type));
@ -1644,7 +1644,7 @@ PhyUnaryRangeFilterExpr::PreCheckOverflow(OffsetVector* input) {
return res_vec;
}
default: {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
"unsupported range node {}",
expr_->op_type_);
}
@ -1811,7 +1811,7 @@ PhyUnaryRangeFilterExpr::ExecRangeVisitorImplForData(EvalCtx& context) {
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported operator type for unary expr: {}",
expr_type));
@ -1923,7 +1923,7 @@ PhyUnaryRangeFilterExpr::ExecTextMatch() {
} else if (op_type == proto::plan::OpType::PhraseMatch) {
return index->PhraseMatchQuery(query, slop);
} else {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
"unsupported operator type for match query: {}",
op_type);
}

View File

@ -107,7 +107,7 @@ struct UnaryElementFunc {
op == proto::plan::OpType::InnerMatch) {
res[i] = milvus::query::Match(src[offset], val, op);
} else {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
"unsupported op_type:{} for UnaryElementFunc",
op);
}
@ -144,7 +144,7 @@ struct UnaryElementFunc {
proto::plan::OpType::InnerMatch) {
res[i] = milvus::query::Match(src[i], val, op);
} else {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
"unsupported op_type:{} for UnaryElementFunc",
op);
}
@ -181,7 +181,7 @@ struct UnaryElementFunc {
res.inplace_compare_val<T, milvus::bitset::CompareOpType::LE>(
src, size, val);
} else {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
"unsupported op_type:{} for UnaryElementFunc",
op);
}
@ -283,7 +283,7 @@ struct UnaryElementFuncForArray {
res[i] = matcher(array_data);
}
} else {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
"unsupported op_type:{} for "
"UnaryElementFuncForArray",
op);
@ -313,7 +313,7 @@ struct UnaryIndexFuncForMatch {
}
if (!index->HasRawData()) {
PanicInfo(Unsupported,
ThrowInfo(Unsupported,
"index don't support regex query and don't have "
"raw data");
}
@ -347,7 +347,7 @@ struct UnaryIndexFuncForMatch {
return res;
}
}
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"UnaryIndexFuncForMatch is only supported on string types");
}
};
@ -378,7 +378,7 @@ struct UnaryIndexFunc {
UnaryIndexFuncForMatch<T> func;
return func(index, val, op);
} else {
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("unsupported op_type:{} for UnaryIndexFunc", op));
}

View File

@ -39,12 +39,12 @@ GetColumnVector(const VectorPtr& result) {
convert_vector->child(0))) {
res = convert_flat_vector;
} else {
PanicInfo(
ThrowInfo(
UnexpectedError,
"RowVector result must have a first ColumnVector children");
}
} else {
PanicInfo(UnexpectedError,
ThrowInfo(UnexpectedError,
"expr result must have a ColumnVector or RowVector result");
}
return res;
@ -109,7 +109,7 @@ CompareTwoJsonArray(T arr1, const proto::plan::Array& arr2) {
break;
}
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"unsupported data type {}",
arr2.array(i).val_case());
}
@ -152,7 +152,7 @@ GetValueFromProtoInternal(const milvus::proto::plan::GenericValue& value_proto,
} else if constexpr (std::is_same_v<T, milvus::proto::plan::GenericValue>) {
return static_cast<T>(value_proto);
} else {
PanicInfo(Unsupported,
ThrowInfo(Unsupported,
"unsupported generic value {}",
value_proto.DebugString());
}

View File

@ -94,7 +94,7 @@ PhyValueExpr::Eval(EvalCtx& context, VectorPtr& result) {
case DataType::ARRAY:
case DataType::JSON:
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
"PhyValueExpr not support data type " +
GetDataTypeName(expr_->type()));
}

View File

@ -21,7 +21,7 @@ namespace milvus::exec::expression::function {
void
CheckVarcharOrStringType(std::shared_ptr<SimpleVector>& vec) {
if (vec->type() != DataType::VARCHAR && vec->type() != DataType::STRING) {
PanicInfo(ExprInvalid,
ThrowInfo(ExprInvalid,
"invalid argument type, expect VARCHAR or STRING, actual {}",
vec->type());
}

View File

@ -30,7 +30,7 @@ namespace function {
void
EmptyVarchar(const RowVector& args, FilterFunctionReturn& result) {
if (args.childrens().size() != 1) {
PanicInfo(ExprInvalid,
ThrowInfo(ExprInvalid,
"invalid argument count, expect 1, actual {}",
args.childrens().size());
}

View File

@ -30,7 +30,7 @@ namespace function {
void
StartsWithVarchar(const RowVector& args, FilterFunctionReturn& result) {
if (args.childrens().size() != 2) {
PanicInfo(ExprInvalid,
ThrowInfo(ExprInvalid,
"invalid argument count, expect 2, actual {}",
args.childrens().size());
}

View File

@ -88,11 +88,11 @@ PhyFilterBitsNode::GetOutput() {
valid_bitset.append(valid_view);
num_processed_rows_ += col_vec_size;
} else {
PanicInfo(ExprInvalid,
ThrowInfo(ExprInvalid,
"PhyFilterBitsNode result should be bitmap");
}
} else {
PanicInfo(ExprInvalid,
ThrowInfo(ExprInvalid,
"PhyFilterBitsNode result should be ColumnVector");
}
}

View File

@ -154,11 +154,11 @@ PhyIterativeFilterNode::GetOutput() {
valid_bitset.append(valid_view);
num_processed_rows_ += col_vec_size;
} else {
PanicInfo(ExprInvalid,
ThrowInfo(ExprInvalid,
"PhyIterativeFilterNode result should be bitmap");
}
} else {
PanicInfo(
ThrowInfo(
ExprInvalid,
"PhyIterativeFilterNode result should be ColumnVector");
}

View File

@ -188,12 +188,12 @@ class SourceOperator : public Operator {
void
AddInput(RowVectorPtr& /* unused */) override {
PanicInfo(NotImplemented, "SourceOperator does not support addInput()");
ThrowInfo(NotImplemented, "SourceOperator does not support addInput()");
}
void
NoMoreInput() override {
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"SourceOperator does not support noMoreInput()");
}

View File

@ -65,7 +65,7 @@ PrepareVectorIteratorsFromIndex(const SearchInfo& search_info,
"inside, terminate {} operation:{}",
operator_type,
knowhere::Status2String(iterators_val.error()));
PanicInfo(
ThrowInfo(
ErrorCode::Unsupported,
fmt::format(
"Returned knowhere iterator has non-ready iterators "
@ -87,7 +87,7 @@ PrepareVectorIteratorsFromIndex(const SearchInfo& search_info,
"operation will be terminated",
e.what(),
operator_type);
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
fmt::format("Failed to {}, current index:" +
index.GetIndexType() + " doesn't support",
operator_type));

View File

@ -127,7 +127,7 @@ SearchGroupBy(const std::vector<std::shared_ptr<VectorIterator>>& iterators,
break;
}
default: {
PanicInfo(
ThrowInfo(
Unsupported,
fmt::format("unsupported data type {} for group by operator",
data_type));

View File

@ -90,7 +90,7 @@ class SealedDataGetter : public DataGetter<T> {
: segment_(segment), field_id_(field_id) {
from_data_ = segment_.HasFieldData(field_id_);
if (!from_data_ && !segment_.HasIndex(field_id_)) {
PanicInfo(
ThrowInfo(
UnexpectedError,
"The segment:{} used to init data getter has no effective "
"data source, neither"
@ -151,7 +151,7 @@ GetDataGetter(const segcore::SegmentInternalInterface& segment,
dynamic_cast<const segcore::SegmentSealed*>(&segment)) {
return std::make_shared<SealedDataGetter<T>>(*sealed_segment, fieldId);
} else {
PanicInfo(UnexpectedError,
ThrowInfo(UnexpectedError,
"The segment used to init data getter is neither growing or "
"sealed, wrong state");
}

View File

@ -51,7 +51,7 @@ struct ExprInfo {
case GenericValue::VAL_NOT_SET:
return true;
default:
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"Not supported GenericValue type");
}
}
@ -78,7 +78,7 @@ struct ExprInfo {
case GenericValue::VAL_NOT_SET:
break;
default:
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"Not supported GenericValue type");
}
return h;

View File

@ -86,7 +86,7 @@ BitmapIndex<T>::Build(size_t n, const T* data, const bool* valid_data) {
return;
}
if (n == 0) {
PanicInfo(DataIsEmpty, "BitmapIndex can not build null values");
ThrowInfo(DataIsEmpty, "BitmapIndex can not build null values");
}
total_num_rows_ = n;
@ -139,7 +139,7 @@ BitmapIndex<T>::BuildWithFieldData(
total_num_rows += field_data->get_num_rows();
}
if (total_num_rows == 0) {
PanicInfo(DataIsEmpty, "scalar bitmap index can not build null values");
ThrowInfo(DataIsEmpty, "scalar bitmap index can not build null values");
}
total_num_rows_ = total_num_rows;
valid_bitset_ = TargetBitmap(total_num_rows_, false);
@ -160,7 +160,7 @@ BitmapIndex<T>::BuildWithFieldData(
BuildArrayField(field_datas);
break;
default:
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("Invalid data type: {} for build bitmap index",
proto::schema::DataType_Name(schema_.data_type())));
@ -494,7 +494,7 @@ BitmapIndex<T>::MMapIndexData(const std::string& file_name,
if (mmap_data_ == MAP_FAILED) {
file.Close();
remove(file_name.c_str());
PanicInfo(
ThrowInfo(
ErrorCode::UnexpectedError, "failed to mmap: {}", strerror(errno));
}
@ -731,7 +731,7 @@ BitmapIndex<T>::RangeForBitset(const T value, const OpType op) {
break;
}
default: {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
fmt::format("Invalid OperatorType: {}", op));
}
}
@ -803,7 +803,7 @@ BitmapIndex<T>::RangeForMmap(const T value, const OpType op) {
break;
}
default: {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
fmt::format("Invalid OperatorType: {}", op));
}
}
@ -864,7 +864,7 @@ BitmapIndex<T>::RangeForRoaring(const T value, const OpType op) {
break;
}
default: {
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
fmt::format("Invalid OperatorType: {}", op));
}
}
@ -1126,7 +1126,7 @@ BitmapIndex<T>::Reverse_Lookup(size_t idx) const {
}
}
}
PanicInfo(UnexpectedError,
ThrowInfo(UnexpectedError,
fmt::format(
"scalar bitmap index can not lookup target value of index {}",
idx));
@ -1168,7 +1168,7 @@ BitmapIndex<T>::ShouldSkip(const T lower_value,
break;
}
default:
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
fmt::format("Invalid OperatorType for "
"checking scalar index optimization: {}",
op));

View File

@ -153,7 +153,7 @@ class BitmapIndex : public ScalarIndex<T> {
return RegexQuery(regex_pattern);
}
default:
PanicInfo(ErrorCode::OpTypeInvalid,
ThrowInfo(ErrorCode::OpTypeInvalid,
"not supported op type: {} for index PatterMatch",
op);
}

View File

@ -182,7 +182,7 @@ HybridScalarIndex<T>::SelectIndexBuildType(
} else if (IsArrayType(field_type_)) {
return SelectBuildTypeForArrayType(field_datas);
} else {
PanicInfo(Unsupported,
ThrowInfo(Unsupported,
fmt::format("unsupported build index for type {}",
DataType_Name(field_type_)));
}
@ -204,7 +204,7 @@ HybridScalarIndex<T>::GetInternalIndex() {
internal_index_ = std::make_shared<InvertedIndexTantivy<T>>(
tantivy_index_version_, file_manager_context_);
} else {
PanicInfo(UnexpectedError,
ThrowInfo(UnexpectedError,
"unknown index type when get internal index");
}
return internal_index_;
@ -227,7 +227,7 @@ HybridScalarIndex<std::string>::GetInternalIndex() {
internal_index_ = std::make_shared<InvertedIndexTantivy<std::string>>(
tantivy_index_version_, file_manager_context_);
} else {
PanicInfo(UnexpectedError,
ThrowInfo(UnexpectedError,
"unknown index type when get internal index");
}
return internal_index_;

View File

@ -79,11 +79,11 @@ IndexFactory::CreateNgramIndex(
params);
case DataType::JSON:
PanicInfo(
ThrowInfo(
NotImplemented,
fmt::format("building ngram index in json is not implemented"));
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("invalid data type to build ngram index: {}",
data_type));
}
@ -113,7 +113,7 @@ IndexFactory::CreatePrimitiveScalarIndex<std::string>(
}
return CreateStringIndexMarisa(file_manager_context);
#else
PanicInfo(Unsupported, "unsupported platform");
ThrowInfo(Unsupported, "unsupported platform");
#endif
}
@ -371,7 +371,7 @@ IndexFactory::CreatePrimitiveScalarIndex(
create_index_info, file_manager_context);
}
default:
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("invalid data type to build index: {}", data_type));
}
@ -389,7 +389,7 @@ IndexFactory::CreateCompositeScalarIndex(
return CreatePrimitiveScalarIndex(
element_type, create_index_info, file_manager_context);
} else {
PanicInfo(
ThrowInfo(
Unsupported,
fmt::format("index type: {} for composite scalar not supported now",
index_type));
@ -400,7 +400,7 @@ IndexBasePtr
IndexFactory::CreateComplexScalarIndex(
IndexType index_type,
const storage::FileManagerContext& file_manager_context) {
PanicInfo(Unsupported, "Complex index not supported now");
ThrowInfo(Unsupported, "Complex index not supported now");
}
IndexBasePtr
@ -436,7 +436,7 @@ IndexFactory::CreateJsonIndex(
return std::make_unique<JsonFlatIndex>(file_manager_context,
nested_path);
default:
PanicInfo(DataTypeInvalid, "Invalid data type:{}", cast_dtype);
ThrowInfo(DataTypeInvalid, "Invalid data type:{}", cast_dtype);
}
}
@ -469,7 +469,7 @@ IndexFactory::CreateScalarIndex(
create_index_info.json_cast_function);
}
default:
PanicInfo(DataTypeInvalid, "Invalid data type:{}", data_type);
ThrowInfo(DataTypeInvalid, "Invalid data type:{}", data_type);
}
}
@ -509,7 +509,7 @@ IndexFactory::CreateVectorIndex(
// TODO caiyd, not support yet
}
default:
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("invalid data type to build disk index: {}",
data_type));
@ -558,7 +558,7 @@ IndexFactory::CreateVectorIndex(
file_manager_context);
}
default:
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("invalid data type to build mem index: {}",
data_type));

View File

@ -49,7 +49,7 @@ InvertedIndexTantivy<T>::InitForBuildIndex() {
boost::filesystem::create_directories(path_);
d_type_ = get_tantivy_data_type(schema_);
if (tantivy_index_exist(path_.c_str())) {
PanicInfo(IndexBuildError,
ThrowInfo(IndexBuildError,
"build inverted index temp dir:{} not empty",
path_);
}
@ -397,7 +397,7 @@ InvertedIndexTantivy<T>::Range(T value, OpType op) {
wrapper_->lower_bound_range_query(value, true, &bitset);
} break;
default:
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
fmt::format("Invalid OperatorType: {}", op));
}
@ -613,7 +613,7 @@ InvertedIndexTantivy<T>::BuildWithFieldData(
}
default:
PanicInfo(ErrorCode::NotImplemented,
ThrowInfo(ErrorCode::NotImplemented,
fmt::format("Inverted index not supported on {}",
schema_.data_type()));
}

View File

@ -54,7 +54,7 @@ get_tantivy_data_type(proto::schema::DataType data_type) {
}
default:
PanicInfo(ErrorCode::NotImplemented,
ThrowInfo(ErrorCode::NotImplemented,
fmt::format("not implemented data type: {}", data_type));
}
}
@ -89,7 +89,7 @@ class InvertedIndexTantivy : public ScalarIndex<T> {
*/
void
Load(const BinarySet& binary_set, const Config& config = {}) override {
PanicInfo(ErrorCode::NotImplemented, "load v1 should be deprecated");
ThrowInfo(ErrorCode::NotImplemented, "load v1 should be deprecated");
}
void
@ -102,7 +102,7 @@ class InvertedIndexTantivy : public ScalarIndex<T> {
void
BuildWithDataset(const DatasetPtr& dataset,
const Config& config = {}) override {
PanicInfo(ErrorCode::NotImplemented,
ThrowInfo(ErrorCode::NotImplemented,
"BuildWithDataset should be deprecated");
}
@ -136,7 +136,7 @@ class InvertedIndexTantivy : public ScalarIndex<T> {
*/
void
Build(size_t n, const T* values, const bool* valid_data) override {
PanicInfo(ErrorCode::NotImplemented, "Build should not be called");
ThrowInfo(ErrorCode::NotImplemented, "Build should not be called");
}
const TargetBitmap
@ -179,7 +179,7 @@ class InvertedIndexTantivy : public ScalarIndex<T> {
std::optional<T>
Reverse_Lookup(size_t offset) const override {
PanicInfo(ErrorCode::NotImplemented,
ThrowInfo(ErrorCode::NotImplemented,
"Reverse_Lookup should not be handled by inverted index");
}
@ -216,7 +216,7 @@ class InvertedIndexTantivy : public ScalarIndex<T> {
return RegexQuery(regex_pattern);
}
default:
PanicInfo(
ThrowInfo(
ErrorCode::OpTypeInvalid,
"not supported op type: {} for inverted index PatternMatch",
op);
@ -261,7 +261,7 @@ class InvertedIndexTantivy : public ScalarIndex<T> {
virtual void
build_index_for_json(
const std::vector<std::shared_ptr<FieldDataBase>>& field_datas) {
PanicInfo(ErrorCode::NotImplemented,
ThrowInfo(ErrorCode::NotImplemented,
"build_index_for_json not implemented");
}

View File

@ -123,7 +123,7 @@ class JsonFlatIndexQueryExecutor : public InvertedIndexTantivy<T> {
json_path_, value, T(), false, true, true, false, &bitset);
} break;
default:
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
fmt::format("Invalid OperatorType: {}", op));
}
return bitset;

View File

@ -235,7 +235,7 @@ JsonKeyStatsInvertedIndex::AddJson(
jsmn_parser parser;
jsmntok_t* tokens = (jsmntok_t*)malloc(16 * sizeof(jsmntok_t));
if (!tokens) {
PanicInfo(ErrorCode::UnexpectedError, "alloc jsmn token failed");
ThrowInfo(ErrorCode::UnexpectedError, "alloc jsmn token failed");
return;
}
int num_tokens = 0;
@ -252,12 +252,12 @@ JsonKeyStatsInvertedIndex::AddJson(
tokens = (jsmntok_t*)realloc(
tokens, token_capacity * sizeof(jsmntok_t));
if (!tokens) {
PanicInfo(ErrorCode::UnexpectedError, "realloc failed");
ThrowInfo(ErrorCode::UnexpectedError, "realloc failed");
}
continue;
} else {
free(tokens);
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"Failed to parse Json: {}, error: {}",
json,
int(r));

View File

@ -65,7 +65,7 @@ ScalarIndex<T>::Query(const DatasetPtr& dataset) {
case OpType::PrefixMatch:
case OpType::PostfixMatch:
default:
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
fmt::format("unsupported operator type: {}", op));
}
}

View File

@ -72,7 +72,7 @@ class ScalarIndex : public IndexBase {
void
BuildWithDataset(const DatasetPtr& dataset,
const Config& config = {}) override {
PanicInfo(Unsupported,
ThrowInfo(Unsupported,
"scalar index don't support build index with dataset");
};
@ -96,14 +96,14 @@ class ScalarIndex : public IndexBase {
InApplyFilter(size_t n,
const T* values,
const std::function<bool(size_t /* offset */)>& filter) {
PanicInfo(ErrorCode::Unsupported, "InApplyFilter is not implemented");
ThrowInfo(ErrorCode::Unsupported, "InApplyFilter is not implemented");
}
virtual void
InApplyCallback(size_t n,
const T* values,
const std::function<void(size_t /* offset */)>& callback) {
PanicInfo(ErrorCode::Unsupported, "InApplyCallback is not implemented");
ThrowInfo(ErrorCode::Unsupported, "InApplyCallback is not implemented");
}
virtual const TargetBitmap
@ -131,7 +131,7 @@ class ScalarIndex : public IndexBase {
virtual const TargetBitmap
PatternMatch(const std::string& pattern, proto::plan::OpType op) {
PanicInfo(Unsupported, "pattern match is not supported");
ThrowInfo(Unsupported, "pattern match is not supported");
}
virtual bool
@ -158,17 +158,17 @@ class ScalarIndex : public IndexBase {
virtual const TargetBitmap
RegexQuery(const std::string& pattern) {
PanicInfo(Unsupported, "regex query is not supported");
ThrowInfo(Unsupported, "regex query is not supported");
}
virtual void
BuildWithFieldData(const std::vector<FieldDataPtr>& field_datas) {
PanicInfo(Unsupported, "BuildwithFieldData is not supported");
ThrowInfo(Unsupported, "BuildwithFieldData is not supported");
}
virtual void
LoadWithoutAssemble(const BinarySet& binary_set, const Config& config) {
PanicInfo(Unsupported, "LoadWithoutAssemble is not supported");
ThrowInfo(Unsupported, "LoadWithoutAssemble is not supported");
}
};

View File

@ -63,7 +63,7 @@ ScalarIndexSort<T>::Build(size_t n, const T* values, const bool* valid_data) {
if (is_built_)
return;
if (n == 0) {
PanicInfo(DataIsEmpty, "ScalarIndexSort cannot build null values!");
ThrowInfo(DataIsEmpty, "ScalarIndexSort cannot build null values!");
}
data_.reserve(n);
total_num_rows_ = n;
@ -95,7 +95,7 @@ ScalarIndexSort<T>::BuildWithFieldData(
length += data->get_num_rows() - data->get_null_count();
}
if (total_num_rows_ == 0) {
PanicInfo(DataIsEmpty, "ScalarIndexSort cannot build null values!");
ThrowInfo(DataIsEmpty, "ScalarIndexSort cannot build null values!");
}
data_.reserve(length);
@ -306,7 +306,7 @@ ScalarIndexSort<T>::Range(const T value, const OpType op) {
data_.begin(), data_.end(), IndexStructure<T>(value));
break;
default:
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
fmt::format("Invalid OperatorType: {}", op));
}
for (; lb < ub; ++lb) {
@ -398,7 +398,7 @@ ScalarIndexSort<T>::ShouldSkip(const T lower_value,
break;
}
default:
PanicInfo(OpTypeInvalid,
ThrowInfo(OpTypeInvalid,
fmt::format("Invalid OperatorType for "
"checking scalar index optimization: {}",
op));

View File

@ -64,7 +64,7 @@ valid_str_id(size_t str_id) {
void
StringIndexMarisa::Build(const Config& config) {
if (built_) {
PanicInfo(IndexAlreadyBuild, "index has been built");
ThrowInfo(IndexAlreadyBuild, "index has been built");
}
auto field_datas =
storage::CacheRawDataAndFillMissing(file_manager_, config);
@ -119,7 +119,7 @@ StringIndexMarisa::Build(size_t n,
const std::string* values,
const bool* valid_data) {
if (built_) {
PanicInfo(IndexAlreadyBuild, "index has been built");
ThrowInfo(IndexAlreadyBuild, "index has been built");
}
marisa::Keyset keyset;
@ -408,7 +408,7 @@ StringIndexMarisa::Range(std::string value, OpType op) {
break;
}
default:
PanicInfo(
ThrowInfo(
OpTypeInvalid,
fmt::format("Invalid OperatorType: {}", static_cast<int>(op)));
}

View File

@ -394,7 +394,7 @@ ReadDataFromFD(int fd, void* buf, size_t size, size_t chunk_size) {
const size_t count = (size < chunk_size) ? size : chunk_size;
const ssize_t size_read = read(fd, buf, count);
if (size_read != count) {
PanicInfo(ErrorCode::UnistdError,
ThrowInfo(ErrorCode::UnistdError,
"read data from fd error, returned read size is " +
std::to_string(size_read));
}

View File

@ -109,12 +109,12 @@ GetValueFromConfig(const Config& cfg, const std::string& key) {
LOG_WARN("config type mismatch for key {}: {}", key, e.what());
return std::nullopt;
}
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"config type error for key {}: {}",
key,
e.what());
} catch (const std::exception& e) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"Unexpected error for key {}: {}",
key,
e.what());

View File

@ -67,9 +67,9 @@ VectorDiskAnnIndex<T>::VectorDiskAnnIndex(
} else {
auto err = get_index_obj.error();
if (err == knowhere::Status::invalid_index_error) {
PanicInfo(ErrorCode::Unsupported, get_index_obj.what());
ThrowInfo(ErrorCode::Unsupported, get_index_obj.what());
}
PanicInfo(ErrorCode::KnowhereError, get_index_obj.what());
ThrowInfo(ErrorCode::KnowhereError, get_index_obj.what());
}
}
@ -108,7 +108,7 @@ VectorDiskAnnIndex<T>::Load(milvus::tracer::TraceContext ctx,
milvus::tracer::GetTracer()->WithActiveSpan(span_load_engine);
auto stat = index_.Deserialize(knowhere::BinarySet(), load_config);
if (stat != knowhere::Status::success)
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"failed to Deserialize index, " + KnowhereStatusString(stat));
span_load_engine->End();
@ -121,7 +121,7 @@ VectorDiskAnnIndex<T>::Upload(const Config& config) {
BinarySet ret;
auto stat = index_.Serialize(ret);
if (stat != knowhere::Status::success) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"failed to serialize index, " + KnowhereStatusString(stat));
}
auto remote_paths_to_size = file_manager_->GetRemotePathsToFileSize();
@ -170,7 +170,7 @@ VectorDiskAnnIndex<T>::Build(const Config& config) {
build_config.erase(VEC_OPT_FIELDS);
auto stat = index_.Build({}, build_config);
if (stat != knowhere::Status::success)
PanicInfo(ErrorCode::IndexBuildError,
ThrowInfo(ErrorCode::IndexBuildError,
"failed to build disk index, " + KnowhereStatusString(stat));
local_chunk_manager->RemoveDir(
@ -224,7 +224,7 @@ VectorDiskAnnIndex<T>::BuildWithDataset(const DatasetPtr& dataset,
auto stat = index_.Build({}, build_config);
if (stat != knowhere::Status::success)
PanicInfo(ErrorCode::IndexBuildError,
ThrowInfo(ErrorCode::IndexBuildError,
"failed to build index, " + KnowhereStatusString(stat));
local_chunk_manager->RemoveDir(
storage::GetSegmentRawDataPathPrefix(local_chunk_manager, segment_id));
@ -267,7 +267,7 @@ VectorDiskAnnIndex<T>::Query(const DatasetPtr dataset,
search_info, topk, GetMetricType(), search_config)) {
auto res = index_.RangeSearch(dataset, search_config, bitset);
if (!res.has_value()) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
fmt::format("failed to range search: {}: {}",
KnowhereStatusString(res.error()),
res.what()));
@ -277,7 +277,7 @@ VectorDiskAnnIndex<T>::Query(const DatasetPtr dataset,
} else {
auto res = index_.Search(dataset, search_config, bitset);
if (!res.has_value()) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
fmt::format("failed to search: {}: {}",
KnowhereStatusString(res.error()),
res.what()));
@ -326,7 +326,7 @@ std::vector<uint8_t>
VectorDiskAnnIndex<T>::GetVector(const DatasetPtr dataset) const {
auto index_type = GetIndexType();
if (IndexIsSparse(index_type)) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"failed to get vector, index is sparse");
}
@ -337,7 +337,7 @@ VectorDiskAnnIndex<T>::GetVector(const DatasetPtr dataset) const {
auto res = index_.GetVectorByIds(dataset);
if (!res.has_value()) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
fmt::format("failed to get vector: {}: {}",
KnowhereStatusString(res.error()),
res.what()));

View File

@ -82,7 +82,7 @@ class VectorDiskAnnIndex : public VectorIndex {
std::unique_ptr<const knowhere::sparse::SparseRow<float>[]>
GetSparseVector(const DatasetPtr dataset) const override {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"get sparse vector not supported for disk index");
}

View File

@ -45,13 +45,13 @@ class VectorIndex : public IndexBase {
BuildWithRawDataForUT(size_t n,
const void* values,
const Config& config = {}) override {
PanicInfo(Unsupported,
ThrowInfo(Unsupported,
"vector index don't support build index with raw data");
};
virtual void
AddWithDataset(const DatasetPtr& dataset, const Config& config) {
PanicInfo(Unsupported, "vector index don't support add with dataset");
ThrowInfo(Unsupported, "vector index don't support add with dataset");
}
virtual void
@ -64,7 +64,7 @@ class VectorIndex : public IndexBase {
VectorIterators(const DatasetPtr dataset,
const knowhere::Json& json,
const BitsetView& bitset) const {
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
"VectorIndex:" + this->GetIndexType() +
" didn't implement VectorIterator interface, "
"there must be sth wrong in the code");

View File

@ -82,9 +82,9 @@ VectorMemIndex<T>::VectorMemIndex(
} else {
auto err = get_index_obj.error();
if (err == knowhere::Status::invalid_index_error) {
PanicInfo(ErrorCode::Unsupported, get_index_obj.what());
ThrowInfo(ErrorCode::Unsupported, get_index_obj.what());
}
PanicInfo(ErrorCode::KnowhereError, get_index_obj.what());
ThrowInfo(ErrorCode::KnowhereError, get_index_obj.what());
}
}
@ -108,9 +108,9 @@ VectorMemIndex<T>::VectorMemIndex(const IndexType& index_type,
} else {
auto err = get_index_obj.error();
if (err == knowhere::Status::invalid_index_error) {
PanicInfo(ErrorCode::Unsupported, get_index_obj.what());
ThrowInfo(ErrorCode::Unsupported, get_index_obj.what());
}
PanicInfo(ErrorCode::KnowhereError, get_index_obj.what());
ThrowInfo(ErrorCode::KnowhereError, get_index_obj.what());
}
}
@ -139,7 +139,7 @@ VectorMemIndex<T>::Serialize(const Config& config) {
knowhere::BinarySet ret;
auto stat = index_.Serialize(ret);
if (stat != knowhere::Status::success)
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"failed to serialize index: {}",
KnowhereStatusString(stat));
Disassemble(ret);
@ -153,7 +153,7 @@ VectorMemIndex<T>::LoadWithoutAssemble(const BinarySet& binary_set,
const Config& config) {
auto stat = index_.Deserialize(binary_set, config);
if (stat != knowhere::Status::success)
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"failed to Deserialize index: {}",
KnowhereStatusString(stat));
SetDim(index_.Dim());
@ -296,7 +296,7 @@ VectorMemIndex<T>::BuildWithDataset(const DatasetPtr& dataset,
knowhere::TimeRecorder rc("BuildWithoutIds", 1);
auto stat = index_.Build(dataset, index_config, use_knowhere_build_pool_);
if (stat != knowhere::Status::success)
PanicInfo(ErrorCode::IndexBuildError,
ThrowInfo(ErrorCode::IndexBuildError,
"failed to build index, " + KnowhereStatusString(stat));
rc.ElapseFromBegin("Done");
SetDim(index_.Dim());
@ -393,7 +393,7 @@ VectorMemIndex<T>::AddWithDataset(const DatasetPtr& dataset,
knowhere::TimeRecorder rc("AddWithDataset", 1);
auto stat = index_.Add(dataset, index_config, use_knowhere_build_pool_);
if (stat != knowhere::Status::success)
PanicInfo(ErrorCode::IndexBuildError,
ThrowInfo(ErrorCode::IndexBuildError,
"failed to append index, " + KnowhereStatusString(stat));
rc.ElapseFromBegin("Done");
}
@ -419,7 +419,7 @@ VectorMemIndex<T>::Query(const DatasetPtr dataset,
auto res = index_.RangeSearch(dataset, search_conf, bitset);
milvus::tracer::AddEvent("finish_knowhere_index_range_search");
if (!res.has_value()) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"failed to range search: {}: {}",
KnowhereStatusString(res.error()),
res.what());
@ -433,7 +433,7 @@ VectorMemIndex<T>::Query(const DatasetPtr dataset,
auto res = index_.Search(dataset, search_conf, bitset);
milvus::tracer::AddEvent("finish_knowhere_index_search");
if (!res.has_value()) {
PanicInfo(
ThrowInfo(
ErrorCode::UnexpectedError,
// escape json brace in case of using message as format
"failed to search: config={} {}: {}",
@ -476,7 +476,7 @@ std::vector<uint8_t>
VectorMemIndex<T>::GetVector(const DatasetPtr dataset) const {
auto index_type = GetIndexType();
if (IndexIsSparse(index_type)) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"failed to get vector, index is sparse");
}
@ -487,7 +487,7 @@ VectorMemIndex<T>::GetVector(const DatasetPtr dataset) const {
auto res = index_.GetVectorByIds(dataset);
if (!res.has_value()) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"failed to get vector, " + KnowhereStatusString(res.error()));
}
auto tensor = res.value()->GetTensor();
@ -505,7 +505,7 @@ std::unique_ptr<const knowhere::sparse::SparseRow<float>[]>
VectorMemIndex<T>::GetSparseVector(const DatasetPtr dataset) const {
auto res = index_.GetVectorByIds(dataset);
if (!res.has_value()) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"failed to get vector, " + KnowhereStatusString(res.error()));
}
// release and transfer ownership to the result unique ptr.
@ -638,7 +638,7 @@ void VectorMemIndex<T>::LoadFromFile(const Config& config) {
auto deserialize_duration =
std::chrono::system_clock::now() - start_deserialize;
if (stat != knowhere::Status::success) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"failed to Deserialize index: {}",
KnowhereStatusString(stat));
}

View File

@ -73,11 +73,11 @@ class IndexFactory {
return std::make_unique<VecIndexCreator>(type, config, context);
case DataType::VECTOR_ARRAY:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("VECTOR_ARRAY is not implemented"));
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("invalid type is {}", invalid_dtype_msg));
}
}

View File

@ -37,7 +37,7 @@ ScalarIndexCreator::ScalarIndexCreator(
if (index_type_ == milvus::index::NGRAM_INDEX_TYPE) {
if (!config.contains(milvus::index::MIN_GRAM) ||
!config.contains(milvus::index::MAX_GRAM)) {
PanicInfo(
ThrowInfo(
milvus::ErrorCode::InvalidParameter,
"Ngram index must specify both min_gram and max_gram");
}

View File

@ -220,7 +220,7 @@ class ChunkedColumnBase : public ChunkedColumnInterface {
PinWrapper<SpanBase>
Span(int64_t chunk_id) const override {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"Span only supported for ChunkedColumn");
}
@ -228,7 +228,7 @@ class ChunkedColumnBase : public ChunkedColumnInterface {
BulkValueAt(std::function<void(const char*, size_t)> fn,
const int64_t* offsets,
int64_t count) override {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"BulkValueAt only supported for ChunkedColumn and "
"ProxyChunkColumn");
}
@ -237,7 +237,7 @@ class ChunkedColumnBase : public ChunkedColumnInterface {
BulkPrimitiveValueAt(void* dst,
const int64_t* offsets,
int64_t count) override {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"BulkPrimitiveValueAt only supported for ChunkedColumn");
}
@ -246,7 +246,7 @@ class ChunkedColumnBase : public ChunkedColumnInterface {
const int64_t* offsets,
int64_t element_sizeof,
int64_t count) override {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"BulkVectorValueAt only supported for ChunkedColumn");
}
@ -254,7 +254,7 @@ class ChunkedColumnBase : public ChunkedColumnInterface {
StringViews(int64_t chunk_id,
std::optional<std::pair<int64_t, int64_t>> offset_len =
std::nullopt) const override {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"StringViews only supported for VariableColumn");
}
@ -262,13 +262,13 @@ class ChunkedColumnBase : public ChunkedColumnInterface {
ArrayViews(
int64_t chunk_id,
std::optional<std::pair<int64_t, int64_t>> offset_len) const override {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"ArrayViews only supported for ArrayChunkedColumn");
}
PinWrapper<std::vector<VectorArrayView>>
VectorArrayViews(int64_t chunk_id) const override {
PanicInfo(
ThrowInfo(
ErrorCode::Unsupported,
"VectorArrayViews only supported for ChunkedVectorArrayColumn");
}
@ -276,7 +276,7 @@ class ChunkedColumnBase : public ChunkedColumnInterface {
PinWrapper<std::pair<std::vector<std::string_view>, FixedVector<bool>>>
ViewsByOffsets(int64_t chunk_id,
const FixedVector<int32_t>& offsets) const override {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"ViewsByOffsets only supported for VariableColumn");
}
@ -400,7 +400,7 @@ class ChunkedColumn : public ChunkedColumnBase {
break;
}
default: {
PanicInfo(
ThrowInfo(
ErrorCode::Unsupported,
"BulkScalarValueAt is not supported for unknown scalar "
"data type: {}",
@ -473,7 +473,7 @@ class ChunkedVariableColumn : public ChunkedColumnBase {
const int64_t* offsets,
int64_t count) const override {
if constexpr (!std::is_same_v<T, std::string>) {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"BulkRawStringAt only supported for "
"ChunkedVariableColumn<std::string>");
}
@ -509,7 +509,7 @@ class ChunkedVariableColumn : public ChunkedColumnBase {
const int64_t* offsets,
int64_t count) const override {
if constexpr (!std::is_same_v<T, Json>) {
PanicInfo(
ThrowInfo(
ErrorCode::Unsupported,
"RawJsonAt only supported for ChunkedVariableColumn<Json>");
}

View File

@ -249,7 +249,7 @@ class ProxyChunkColumn : public ChunkedColumnInterface {
PinWrapper<SpanBase>
Span(int64_t chunk_id) const override {
if (!IsChunkedColumnDataType(data_type_)) {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"Span only supported for ChunkedColumn");
}
auto chunk_wrapper = group_->GetGroupChunk(chunk_id);
@ -263,7 +263,7 @@ class ProxyChunkColumn : public ChunkedColumnInterface {
std::optional<std::pair<int64_t, int64_t>> offset_len =
std::nullopt) const override {
if (!IsChunkedVariableColumnDataType(data_type_)) {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"StringViews only supported for ChunkedVariableColumn");
}
auto chunk_wrapper = group_->GetGroupChunk(chunk_id);
@ -279,7 +279,7 @@ class ProxyChunkColumn : public ChunkedColumnInterface {
std::optional<std::pair<int64_t, int64_t>> offset_len =
std::nullopt) const override {
if (!IsChunkedArrayColumnDataType(data_type_)) {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"ArrayViews only supported for ChunkedArrayColumn");
}
auto chunk_wrapper = group_->GetGroupChunk(chunk_id);
@ -292,7 +292,7 @@ class ProxyChunkColumn : public ChunkedColumnInterface {
PinWrapper<std::vector<VectorArrayView>>
VectorArrayViews(int64_t chunk_id) const override {
if (!IsChunkedVectorArrayColumnDataType(data_type_)) {
PanicInfo(
ThrowInfo(
ErrorCode::Unsupported,
"VectorArrayViews only supported for ChunkedVectorArrayColumn");
}
@ -307,7 +307,7 @@ class ProxyChunkColumn : public ChunkedColumnInterface {
ViewsByOffsets(int64_t chunk_id,
const FixedVector<int32_t>& offsets) const override {
if (!IsChunkedVariableColumnDataType(data_type_)) {
PanicInfo(
ThrowInfo(
ErrorCode::Unsupported,
"ViewsByOffsets only supported for ChunkedVariableColumn");
}
@ -409,7 +409,7 @@ class ProxyChunkColumn : public ChunkedColumnInterface {
break;
}
default: {
PanicInfo(
ThrowInfo(
ErrorCode::Unsupported,
"BulkScalarValueAt is not supported for unknown scalar "
"data type: {}",
@ -440,7 +440,7 @@ class ProxyChunkColumn : public ChunkedColumnInterface {
int64_t count = 0) const override {
if (!IsChunkedVariableColumnDataType(data_type_) ||
data_type_ == DataType::JSON) {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"BulkRawStringAt only supported for ProxyChunkColumn of "
"variable length type(except Json)");
}
@ -478,7 +478,7 @@ class ProxyChunkColumn : public ChunkedColumnInterface {
const int64_t* offsets,
int64_t count) const override {
if (data_type_ != DataType::JSON) {
PanicInfo(
ThrowInfo(
ErrorCode::Unsupported,
"RawJsonAt only supported for ProxyChunkColumn of Json type");
}
@ -503,7 +503,7 @@ class ProxyChunkColumn : public ChunkedColumnInterface {
const int64_t* offsets,
int64_t count) const override {
if (!IsChunkedArrayColumnDataType(data_type_)) {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"BulkArrayAt only supported for ChunkedArrayColumn");
}
auto [cids, offsets_in_chunk] = ToChunkIdAndOffset(offsets, count);
@ -523,7 +523,7 @@ class ProxyChunkColumn : public ChunkedColumnInterface {
const int64_t* offsets,
int64_t count) const override {
if (!IsChunkedVectorArrayColumnDataType(data_type_)) {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"BulkVectorArrayAt only supported for "
"ChunkedVectorArrayColumn");
}

View File

@ -130,7 +130,7 @@ class ChunkedColumnInterface {
BulkRawStringAt(std::function<void(std::string_view, size_t, bool)> fn,
const int64_t* offsets = nullptr,
int64_t count = 0) const {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"BulkRawStringAt only supported for ChunkColumnInterface of "
"variable length type");
}
@ -139,7 +139,7 @@ class ChunkedColumnInterface {
BulkRawJsonAt(std::function<void(Json, size_t, bool)> fn,
const int64_t* offsets,
int64_t count) const {
PanicInfo(
ThrowInfo(
ErrorCode::Unsupported,
"RawJsonAt only supported for ChunkColumnInterface of Json type");
}
@ -148,7 +148,7 @@ class ChunkedColumnInterface {
BulkArrayAt(std::function<void(ScalarFieldProto&&, size_t)> fn,
const int64_t* offsets,
int64_t count) const {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"BulkArrayAt only supported for ChunkedArrayColumn");
}
@ -156,7 +156,7 @@ class ChunkedColumnInterface {
BulkVectorArrayAt(std::function<void(VectorFieldProto&&, size_t)> fn,
const int64_t* offsets,
int64_t count) const {
PanicInfo(
ThrowInfo(
ErrorCode::Unsupported,
"BulkVectorArrayAt only supported for ChunkedVectorArrayColumn");
}
@ -199,7 +199,7 @@ class ChunkedColumnInterface {
auto num_rows = NumRows();
for (int64_t i = 0; i < count; i++) {
if (offsets[i] < 0 || offsets[i] >= num_rows) {
PanicInfo(ErrorCode::OutOfRange,
ThrowInfo(ErrorCode::OutOfRange,
"offsets[{}] {} is out of range, num_rows: {}",
i,
offsets[i],

View File

@ -23,7 +23,7 @@ CachedSearchIterator::CachedSearchIterator(
const SearchInfo& search_info,
const BitsetView& bitset) {
if (query_ds == nullptr) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"Query dataset is nullptr, cannot initialize iterator");
}
nq_ = query_ds->GetRows();
@ -38,7 +38,7 @@ CachedSearchIterator::CachedSearchIterator(
if (expected_iterators.has_value()) {
iterators_ = std::move(expected_iterators.value());
} else {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"Failed to create iterators from index");
}
}
@ -66,7 +66,7 @@ CachedSearchIterator::InitializeChunkedIterators(
std::make_move_iterator(chunk_iterators.begin()),
std::make_move_iterator(chunk_iterators.end()));
} else {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"Failed to create iterators from index");
}
offset += chunk_size;
@ -83,12 +83,12 @@ CachedSearchIterator::CachedSearchIterator(
const BitsetView& bitset,
const milvus::DataType& data_type) {
if (vec_data == nullptr) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"Vector data is nullptr, cannot initialize iterator");
}
if (row_count <= 0) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"Number of rows is 0, cannot initialize iterator");
}
@ -122,7 +122,7 @@ CachedSearchIterator::CachedSearchIterator(
const BitsetView& bitset,
const milvus::DataType& data_type) {
if (column == nullptr) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"Column is nullptr, cannot initialize iterator");
}
@ -158,7 +158,7 @@ CachedSearchIterator::NextBatch(const SearchInfo& search_info,
}
if (iterators_.size() != nq_ * num_chunks_) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"Iterator size mismatch, expect %d, but got %d",
nq_ * num_chunks_,
iterators_.size());
@ -181,13 +181,13 @@ CachedSearchIterator::NextBatch(const SearchInfo& search_info,
void
CachedSearchIterator::ValidateSearchInfo(const SearchInfo& search_info) {
if (!search_info.iterator_v2_info_.has_value()) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"Iterator v2 SearchInfo is not set");
}
auto iterator_v2_info = search_info.iterator_v2_info_.value();
if (iterator_v2_info.batch_size != batch_size_) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"Batch size mismatch, expect %d, but got %d",
batch_size_,
iterator_v2_info.batch_size);
@ -314,19 +314,19 @@ CachedSearchIterator::WriteSingleQuerySearchResult(
void
CachedSearchIterator::Init(const SearchInfo& search_info) {
if (!search_info.iterator_v2_info_.has_value()) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"Iterator v2 info is not set, cannot initialize iterator");
}
auto iterator_v2_info = search_info.iterator_v2_info_.value();
if (iterator_v2_info.batch_size == 0) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"Batch size is 0, cannot initialize iterator");
}
batch_size_ = iterator_v2_info.batch_size;
if (search_info.metric_type_.empty()) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"Metric type is empty, cannot initialize iterator");
}
if (PositivelyRelated(search_info.metric_type_)) {
@ -336,13 +336,13 @@ CachedSearchIterator::Init(const SearchInfo& search_info) {
}
if (nq_ == 0) {
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"Number of queries is 0, cannot initialize iterator");
}
// disable multi-query for now
if (nq_ > 1) {
PanicInfo(
ThrowInfo(
ErrorCode::UnexpectedError,
"Number of queries is greater than 1, cannot initialize iterator");
}

View File

@ -105,7 +105,7 @@ ExecPlanNodeVisitor::ExecuteTask(
BitsetTypeView view(vec->GetRawData(), vec->size());
bitset_holder.append(view);
} else {
PanicInfo(UnexpectedError, "expr return type not matched");
ThrowInfo(UnexpectedError, "expr return type not matched");
}
}
return bitset_holder;

View File

@ -60,7 +60,7 @@ ParsePlaceholderGroup(const Plan* plan,
} else {
auto line_size = info.values().Get(0).size();
if (field_meta.get_sizeof() != line_size) {
PanicInfo(
ThrowInfo(
DimNotMatch,
fmt::format("vector dimension mismatch, expected vector "
"size(byte) {}, actual {}.",
@ -90,7 +90,7 @@ ParsePlanNodeProto(proto::plan::PlanNode& plan_node,
auto res = plan_node.ParsePartialFromCodedStream(&input_stream);
if (!res) {
PanicInfo(UnexpectedError, "parse plan node proto failed");
ThrowInfo(UnexpectedError, "parse plan node proto failed");
}
}

View File

@ -62,7 +62,7 @@ ProtoParser::PlanNodeFromProto(const planpb::PlanNode& plan_node_proto) {
search_info.iterative_filter_execution = true;
} else {
// check if hints is valid
PanicInfo(ConfigInvalid,
ThrowInfo(ConfigInvalid,
"hints: {} not supported",
query_info_proto.hints());
}
@ -71,7 +71,7 @@ ProtoParser::PlanNodeFromProto(const planpb::PlanNode& plan_node_proto) {
search_info.iterative_filter_execution = true;
} else {
// check if hints is valid
PanicInfo(ConfigInvalid,
ThrowInfo(ConfigInvalid,
"hints: {} not supported",
search_info.search_params_[HINTS]);
}
@ -395,7 +395,7 @@ ProtoParser::ParseCallExprs(const proto::plan::CallExpr& expr_pb) {
auto function = factory.GetFilterFunction(func_sig);
if (function == nullptr) {
PanicInfo(ExprInvalid,
ThrowInfo(ExprInvalid,
"function " + func_sig.ToString() + " not found. ");
}
return std::make_shared<expr::CallExpr>(
@ -577,14 +577,14 @@ ProtoParser::ParseExprs(const proto::plan::Expr& expr_pb,
default: {
std::string s;
google::protobuf::TextFormat::PrintToString(expr_pb, &s);
PanicInfo(ExprInvalid,
ThrowInfo(ExprInvalid,
std::string("unsupported expr proto node: ") + s);
}
}
if (type_check(result->type())) {
return result;
}
PanicInfo(
ThrowInfo(
ExprInvalid, "expr type check failed, actual type: {}", result->type());
}

View File

@ -29,13 +29,13 @@ RelationalImpl(const T& t, const U& u, FundamentalTag, FundamentalTag) {
template <typename Op, typename T, typename U>
bool
RelationalImpl(const T& t, const U& u, FundamentalTag, StringTag) {
PanicInfo(DataTypeInvalid, "incompitible data type");
ThrowInfo(DataTypeInvalid, "incompitible data type");
}
template <typename Op, typename T, typename U>
bool
RelationalImpl(const T& t, const U& u, StringTag, FundamentalTag) {
PanicInfo(DataTypeInvalid, "incompitible data type");
ThrowInfo(DataTypeInvalid, "incompitible data type");
}
template <typename Op, typename T, typename U>
@ -58,7 +58,7 @@ struct Relational {
template <typename... T>
bool
operator()(const T&...) const {
PanicInfo(OpTypeInvalid, "incompatible operands");
ThrowInfo(OpTypeInvalid, "incompatible operands");
}
};

View File

@ -50,7 +50,7 @@ generate_scalar_index(SpanBase data, DataType data_type) {
case DataType::VARCHAR:
return generate_scalar_index(Span<std::string>(data));
default:
PanicInfo(DataTypeInvalid, "unsupported type {}", data_type);
ThrowInfo(DataTypeInvalid, "unsupported type {}", data_type);
}
}

View File

@ -149,14 +149,14 @@ BruteForceSearch(const dataset::SearchDataset& query_ds,
res = knowhere::BruteForce::RangeSearch<int8>(
base_dataset, query_dataset, search_cfg, bitset);
} else {
PanicInfo(
ThrowInfo(
ErrorCode::Unsupported,
"Unsupported dataType for chunk brute force range search:{}",
data_type);
}
milvus::tracer::AddEvent("knowhere_finish_BruteForce_RangeSearch");
if (!res.has_value()) {
PanicInfo(KnowhereError,
ThrowInfo(KnowhereError,
"Brute force range search fail: {}, {}",
KnowhereStatusString(res.error()),
res.what());
@ -219,13 +219,13 @@ BruteForceSearch(const dataset::SearchDataset& query_ds,
search_cfg,
bitset);
} else {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"Unsupported dataType for chunk brute force search:{}",
data_type);
}
milvus::tracer::AddEvent("knowhere_finish_BruteForce_SearchWithBuf");
if (stat != knowhere::Status::success) {
PanicInfo(KnowhereError,
ThrowInfo(KnowhereError,
"Brute force search fail: " + KnowhereStatusString(stat));
}
}
@ -257,7 +257,7 @@ DispatchBruteForceIteratorByDataType(const knowhere::DataSetPtr& base_dataset,
return knowhere::BruteForce::AnnIterator<int8>(
base_dataset, query_dataset, config, bitset);
default:
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"Unsupported dataType for chunk brute force iterator:{}",
data_type);
}
@ -306,7 +306,7 @@ PackBruteForceSearchIteratorsIntoSubResult(
LOG_ERROR(
"Failed to get valid knowhere brute-force-iterators from chunk, "
"terminate search_group_by operation");
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"Returned knowhere brute-force-iterator has non-ready "
"iterators inside, terminate search_group_by operation");
}

View File

@ -21,7 +21,7 @@ namespace milvus::query {
template <typename T, typename U>
inline bool
Match(const T& x, const U& y, OpType op) {
PanicInfo(NotImplemented, "not supported");
ThrowInfo(NotImplemented, "not supported");
}
template <>
@ -35,7 +35,7 @@ Match<std::string>(const std::string& str, const std::string& val, OpType op) {
case OpType::InnerMatch:
return InnerMatch(str, val);
default:
PanicInfo(OpTypeInvalid, "not supported");
ThrowInfo(OpTypeInvalid, "not supported");
}
}
@ -52,7 +52,7 @@ Match<std::string_view>(const std::string_view& str,
case OpType::InnerMatch:
return InnerMatch(str, val);
default:
PanicInfo(OpTypeInvalid, "not supported");
ThrowInfo(OpTypeInvalid, "not supported");
}
}

View File

@ -95,7 +95,7 @@ ChunkedSegmentSealedImpl::LoadIndex(const LoadIndexInfo& info) {
auto& field_meta = schema_->operator[](field_id);
if (field_meta.get_data_type() == DataType::VECTOR_ARRAY) {
PanicInfo(DataTypeInvalid, "VECTOR_ARRAY is not implemented");
ThrowInfo(DataTypeInvalid, "VECTOR_ARRAY is not implemented");
}
if (field_meta.is_vector()) {
@ -497,7 +497,7 @@ ChunkedSegmentSealedImpl::num_chunk_index(FieldId field_id) const {
auto& field_meta = schema_->operator[](field_id);
if (field_meta.get_data_type() == DataType::VECTOR_ARRAY) {
PanicInfo(DataTypeInvalid, "VECTOR_ARRAY is not implemented");
ThrowInfo(DataTypeInvalid, "VECTOR_ARRAY is not implemented");
}
if (field_meta.is_vector()) {
@ -566,7 +566,7 @@ ChunkedSegmentSealedImpl::chunk_data_impl(FieldId field_id,
if (auto it = fields_.find(field_id); it != fields_.end()) {
return it->second->Span(chunk_id);
}
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"chunk_data_impl only used for chunk column field ");
}
@ -582,7 +582,7 @@ ChunkedSegmentSealedImpl::chunk_array_view_impl(
if (auto it = fields_.find(field_id); it != fields_.end()) {
return it->second->ArrayViews(chunk_id, offset_len);
}
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"chunk_array_view_impl only used for chunk column field ");
}
@ -599,7 +599,7 @@ ChunkedSegmentSealedImpl::chunk_string_view_impl(
auto column = it->second;
return column->StringViews(chunk_id, offset_len);
}
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"chunk_string_view_impl only used for variable column field ");
}
@ -614,7 +614,7 @@ ChunkedSegmentSealedImpl::chunk_view_by_offsets(
if (auto it = fields_.find(field_id); it != fields_.end()) {
return it->second->ViewsByOffsets(chunk_id, offsets);
}
PanicInfo(ErrorCode::UnexpectedError,
ThrowInfo(ErrorCode::UnexpectedError,
"chunk_view_by_offsets only used for variable column field ");
}
@ -832,7 +832,7 @@ ChunkedSegmentSealedImpl::check_search(const query::Plan* plan) const {
"Extra info of search plan doesn't have value");
if (!is_system_field_ready()) {
PanicInfo(FieldNotLoaded,
ThrowInfo(FieldNotLoaded,
"failed to load row ID or timestamp, potential missing "
"bin logs or "
"empty segments. Segment ID = " +
@ -857,7 +857,7 @@ ChunkedSegmentSealedImpl::check_search(const query::Plan* plan) const {
auto& field_meta = plan->schema_->operator[](field_id);
// request field may has added field
if (!field_meta.is_nullable()) {
PanicInfo(FieldNotLoaded,
ThrowInfo(FieldNotLoaded,
"User Field(" + field_meta.get_name().get() +
") is not loaded");
}
@ -935,7 +935,7 @@ ChunkedSegmentSealedImpl::search_sorted_pk(const PkType& pk,
break;
}
default: {
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format(
"unsupported type {}",
@ -1034,10 +1034,10 @@ ChunkedSegmentSealedImpl::bulk_subscript(SystemFieldType system_type,
static_cast<Timestamp*>(output));
break;
case SystemFieldType::RowId:
PanicInfo(ErrorCode::Unsupported, "RowId retrieve not supported");
ThrowInfo(ErrorCode::Unsupported, "RowId retrieve not supported");
break;
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unknown subscript fields", system_type));
}
}
@ -1442,7 +1442,7 @@ ChunkedSegmentSealedImpl::get_raw_data(FieldId field_id,
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported data type {}",
field_meta.get_data_type()));
}
@ -1624,7 +1624,7 @@ ChunkedSegmentSealedImpl::search_ids(const IdArray& id_array,
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported type {}", data_type));
}
}
@ -1697,7 +1697,7 @@ ChunkedSegmentSealedImpl::LoadSegmentMeta(
slice_lengths.push_back(info.row_count());
}
insert_record_.timestamp_index_.set_length_meta(std::move(slice_lengths));
PanicInfo(NotImplemented, "unimplemented");
ThrowInfo(NotImplemented, "unimplemented");
}
int64_t

View File

@ -56,7 +56,7 @@ VectorBase::set_data_raw(ssize_t element_offset,
}
return set_data_raw(element_offset, data_raw.data(), element_count);
} else {
PanicInfo(DataTypeInvalid, "unsupported vector type");
ThrowInfo(DataTypeInvalid, "unsupported vector type");
}
}
@ -121,7 +121,7 @@ VectorBase::set_data_raw(ssize_t element_offset,
return set_data_raw(element_offset, data_raw.data(), element_count);
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported datatype {}",
field_meta.get_data_type()));
}

View File

@ -201,13 +201,13 @@ class ConcurrentVectorImpl : public VectorBase {
SpanBase
get_span_base(int64_t chunk_id) const override {
if constexpr (std::is_same_v<Type, VectorArray>) {
PanicInfo(NotImplemented, "unimplemented");
ThrowInfo(NotImplemented, "unimplemented");
} else if constexpr (is_type_entire_row) {
return chunks_ptr_->get_span(chunk_id);
} else if constexpr (std::is_same_v<Type, int64_t> || // NOLINT
std::is_same_v<Type, int>) {
// only for testing
PanicInfo(NotImplemented, "unimplemented");
ThrowInfo(NotImplemented, "unimplemented");
} else {
auto chunk_data = chunks_ptr_->get_chunk_data(chunk_id);
auto chunk_size = chunks_ptr_->get_chunk_size(chunk_id);
@ -275,13 +275,13 @@ class ConcurrentVectorImpl : public VectorBase {
int64_t
get_element_size() const override {
if constexpr (std::is_same_v<Type, VectorArray>) {
PanicInfo(NotImplemented, "unimplemented");
ThrowInfo(NotImplemented, "unimplemented");
} else if constexpr (is_type_entire_row) {
return chunks_ptr_->get_element_size();
} else if constexpr (std::is_same_v<Type, int64_t> || // NOLINT
std::is_same_v<Type, int>) {
// only for testing
PanicInfo(NotImplemented, "unimplemented");
ThrowInfo(NotImplemented, "unimplemented");
} else {
static_assert(
std::is_same_v<typename TraitType::embedded_type, Type>);

View File

@ -339,7 +339,7 @@ CreateIndex(const FieldMeta& field_meta,
segcore_config,
field_raw_data);
} else {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported vector type in index: {}",
field_meta.get_data_type()));
}
@ -370,7 +370,7 @@ CreateIndex(const FieldMeta& field_meta,
return std::make_unique<ScalarFieldIndexing<std::string>>(
field_meta, segcore_config);
default:
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported scalar type in index: {}",
field_meta.get_data_type()));
}

View File

@ -110,7 +110,7 @@ class ScalarFieldIndexing : public FieldIndexing {
int64_t size,
const VectorBase* vec_base,
const void* data_source) override {
PanicInfo(Unsupported,
ThrowInfo(Unsupported,
"scalar index doesn't support append vector segment index");
}
@ -120,7 +120,7 @@ class ScalarFieldIndexing : public FieldIndexing {
int64_t new_data_dim,
const VectorBase* vec_base,
const void* data_source) override {
PanicInfo(Unsupported,
ThrowInfo(Unsupported,
"scalar index doesn't support append vector segment index");
}
@ -129,7 +129,7 @@ class ScalarFieldIndexing : public FieldIndexing {
int64_t count,
int64_t element_size,
void* output) override {
PanicInfo(Unsupported,
ThrowInfo(Unsupported,
"scalar index don't support get data from index");
}

View File

@ -93,7 +93,7 @@ class OffsetOrderedMap : public OffsetMap {
void
seal() override {
PanicInfo(
ThrowInfo(
NotImplemented,
"OffsetOrderedMap used for growing segment could not be sealed.");
}
@ -200,7 +200,7 @@ class OffsetOrderedArray : public OffsetMap {
void
insert(const PkType& pk, int64_t offset) override {
if (is_sealed) {
PanicInfo(Unsupported,
ThrowInfo(Unsupported,
"OffsetOrderedArray could not insert after seal");
}
array_.push_back(
@ -306,7 +306,7 @@ struct InsertRecord {
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported pk type",
field_meta.get_data_type()));
}
@ -365,7 +365,7 @@ struct InsertRecord {
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported primary key data type",
data_type));
}
@ -446,7 +446,7 @@ struct InsertRecord<false> : public InsertRecord<true> {
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported pk type",
field_meta.get_data_type()));
}
@ -482,7 +482,7 @@ struct InsertRecord<false> : public InsertRecord<true> {
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported primary key data type",
data_type));
}
@ -570,7 +570,7 @@ struct InsertRecord<false> : public InsertRecord<true> {
dense_vec_mmap_descriptor);
return;
} else {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported vector type",
field_meta.get_data_type()));
}
@ -628,7 +628,7 @@ struct InsertRecord<false> : public InsertRecord<true> {
return;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported scalar type",
field_meta.get_data_type()));
}

View File

@ -120,7 +120,7 @@ AssembleGroupByValues(
break;
}
default: {
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("unsupported datatype for group_by operations ",
group_by_data_type));

View File

@ -37,7 +37,7 @@ apply_parser(const YAML::Node& node, Func func) {
results.emplace_back(func(element));
}
} else {
PanicInfo(ConfigInvalid, "node should be scalar or sequence");
ThrowInfo(ConfigInvalid, "node should be scalar or sequence");
}
return results;
}
@ -102,7 +102,7 @@ SegcoreConfig::parse_from(const std::string& config_path) {
} catch (const std::exception& e) {
std::string str =
std::string("Invalid Yaml: ") + config_path + ", err: " + e.what();
PanicInfo(ConfigInvalid, str);
ThrowInfo(ConfigInvalid, str);
}
}

View File

@ -117,7 +117,7 @@ class SegcoreConfig {
} else if (refine_type == "UINT8") {
refine_type_ = knowhere::RefineType::UINT8_QUANT;
} else {
PanicInfo(Unsupported,
ThrowInfo(Unsupported,
"unsupported refine type for intermin index.");
}
}

View File

@ -188,7 +188,7 @@ SegmentChunkReader::GetChunkDataAccessor(DataType data_type,
field_id, index, current_chunk_id, current_chunk_pos);
}
default:
PanicInfo(DataTypeInvalid, "unsupported data type: {}", data_type);
ThrowInfo(DataTypeInvalid, "unsupported data type: {}", data_type);
}
}
@ -301,7 +301,7 @@ SegmentChunkReader::GetChunkDataAccessor(DataType data_type,
field_id, chunk_id, data_barrier);
}
default:
PanicInfo(DataTypeInvalid, "unsupported data type: {}", data_type);
ThrowInfo(DataTypeInvalid, "unsupported data type: {}", data_type);
}
}

View File

@ -620,7 +620,7 @@ SegmentGrowingImpl::chunk_string_view_impl(
int64_t chunk_id,
std::optional<std::pair<int64_t, int64_t>> offset_len =
std::nullopt) const {
PanicInfo(ErrorCode::NotImplemented,
ThrowInfo(ErrorCode::NotImplemented,
"chunk string view impl not implement for growing segment");
}
@ -630,7 +630,7 @@ SegmentGrowingImpl::chunk_array_view_impl(
int64_t chunk_id,
std::optional<std::pair<int64_t, int64_t>> offset_len =
std::nullopt) const {
PanicInfo(ErrorCode::NotImplemented,
ThrowInfo(ErrorCode::NotImplemented,
"chunk array view impl not implement for growing segment");
}
@ -639,7 +639,7 @@ SegmentGrowingImpl::chunk_view_by_offsets(
FieldId field_id,
int64_t chunk_id,
const FixedVector<int32_t>& offsets) const {
PanicInfo(ErrorCode::NotImplemented,
ThrowInfo(ErrorCode::NotImplemented,
"chunk view by offsets not implemented for growing segment");
}
@ -763,7 +763,7 @@ SegmentGrowingImpl::bulk_subscript(FieldId field_id,
->mutable_vector_array()
->mutable_data());
} else {
PanicInfo(DataTypeInvalid, "logical error");
ThrowInfo(DataTypeInvalid, "logical error");
}
return result;
}
@ -880,7 +880,7 @@ SegmentGrowingImpl::bulk_subscript(FieldId field_id,
break;
}
default: {
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("unsupported type {}", field_meta.get_data_type()));
}
@ -1061,11 +1061,11 @@ SegmentGrowingImpl::bulk_subscript(SystemFieldType system_type,
static_cast<Timestamp*>(output));
break;
case SystemFieldType::RowId:
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"RowId retrieve is not supported");
break;
default:
PanicInfo(DataTypeInvalid, "unknown subscript fields");
ThrowInfo(DataTypeInvalid, "unknown subscript fields");
}
}
@ -1098,7 +1098,7 @@ SegmentGrowingImpl::search_ids(const IdArray& id_array,
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported type {}", data_type));
}
}

View File

@ -120,7 +120,7 @@ SegmentInternalInterface::Retrieve(tracer::TraceContext* trace_ctx,
output_data_size += get_field_avg_size(field_id) * result_rows;
}
if (output_data_size > limit_size) {
PanicInfo(
ThrowInfo(
RetrieveError,
fmt::format("query results exceed the limit size ", limit_size));
}
@ -240,7 +240,7 @@ SegmentInternalInterface::FillTargetEntry(
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported datatype {}",
field_meta.get_data_type()));
}
@ -322,7 +322,7 @@ SegmentInternalInterface::get_field_avg_size(FieldId field_id) const {
return sizeof(int64_t);
}
PanicInfo(FieldIDInvalid, "unsupported system field id");
ThrowInfo(FieldIDInvalid, "unsupported system field id");
}
auto schema = get_schema();
@ -433,7 +433,7 @@ SegmentInternalInterface::bulk_subscript_not_exist_field(
const milvus::FieldMeta& field_meta, int64_t count) const {
auto data_type = field_meta.get_data_type();
if (IsVectorDataType(data_type)) {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported added field type {}",
field_meta.get_data_type()));
}
@ -512,7 +512,7 @@ SegmentInternalInterface::bulk_subscript_not_exist_field(
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported default value type {}",
field_meta.get_data_type()));
}

View File

@ -212,7 +212,7 @@ class SegmentInternalInterface : public SegmentInterface {
int64_t start_offset,
int64_t length) const {
if (this->type() == SegmentType::Growing) {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"get chunk views not supported for growing segment");
}
return chunk_view<ViewType>(
@ -225,7 +225,7 @@ class SegmentInternalInterface : public SegmentInterface {
int64_t chunk_id,
const FixedVector<int32_t>& offsets) const {
if (this->type() == SegmentType::Growing) {
PanicInfo(ErrorCode::Unsupported,
ThrowInfo(ErrorCode::Unsupported,
"get chunk views not supported for growing segment");
}
auto pw = chunk_view_by_offsets(field_id, chunk_id, offsets);
@ -518,7 +518,7 @@ class SegmentInternalInterface : public SegmentInterface {
chunk_index_impl(FieldId field_id,
const std::string& path,
int64_t chunk_id) const {
PanicInfo(ErrorCode::NotImplemented, "not implemented");
ThrowInfo(ErrorCode::NotImplemented, "not implemented");
};
virtual bool

View File

@ -47,7 +47,7 @@ ParsePksFromFieldData(std::vector<PkType>& pks, const DataArray& data) {
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported PK {}", data_type));
}
}
@ -77,7 +77,7 @@ ParsePksFromFieldData(DataType data_type,
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported PK {}", data_type));
}
}
@ -102,7 +102,7 @@ ParsePksFromIDs(std::vector<PkType>& pks,
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported PK {}", data_type));
}
}
@ -118,7 +118,7 @@ GetSizeOfIdArray(const IdArray& data) {
return data.str_id().data_size();
}
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported id {}", data.descriptor()->name()));
}
@ -202,7 +202,7 @@ GetRawDataSizeOfDataArray(const DataArray* data,
break;
}
default:
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("unsupported element type for array",
field_meta.get_element_type()));
@ -225,7 +225,7 @@ GetRawDataSizeOfDataArray(const DataArray* data,
break;
}
default: {
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
fmt::format("not implemented vector type {}",
field_meta.get_element_type()));
}
@ -233,7 +233,7 @@ GetRawDataSizeOfDataArray(const DataArray* data,
break;
}
default: {
PanicInfo(
ThrowInfo(
DataTypeInvalid,
fmt::format("unsupported variable datatype {}", data_type));
}
@ -324,7 +324,7 @@ CreateEmptyScalarDataArray(int64_t count, const FieldMeta& field_meta) {
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported datatype {}", data_type));
}
}
@ -394,7 +394,7 @@ CreateEmptyVectorDataArray(int64_t count, const FieldMeta& field_meta) {
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported datatype {}", data_type));
}
}
@ -489,7 +489,7 @@ CreateScalarDataArrayFrom(const void* data_raw,
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported datatype {}", data_type));
}
}
@ -579,7 +579,7 @@ CreateVectorDataArrayFrom(const void* data_raw,
break;
}
default: {
PanicInfo(NotImplemented,
ThrowInfo(NotImplemented,
fmt::format("not implemented vector type {}",
vector_type));
}
@ -587,7 +587,7 @@ CreateVectorDataArrayFrom(const void* data_raw,
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported datatype {}", data_type));
}
}
@ -668,9 +668,9 @@ MergeDataArray(std::vector<MergeBase>& merge_bases,
auto obj = vector_array->mutable_int8_vector();
obj->assign(data, dim * sizeof(int8));
} else if (field_meta.get_data_type() == DataType::VECTOR_ARRAY) {
PanicInfo(DataTypeInvalid, "VECTOR_ARRAY is not implemented");
ThrowInfo(DataTypeInvalid, "VECTOR_ARRAY is not implemented");
} else {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported datatype {}", data_type));
}
continue;
@ -738,7 +738,7 @@ MergeDataArray(std::vector<MergeBase>& merge_bases,
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported datatype {}", data_type));
}
}
@ -926,7 +926,7 @@ ReverseDataFromIndex(const index::IndexBase* index,
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported datatype {}", data_type));
}
}

View File

@ -53,7 +53,7 @@ AppendLoadFieldInfo(CLoadFieldDataInfo c_load_field_data_info,
static_cast<LoadFieldDataInfo*>(c_load_field_data_info);
auto iter = load_field_data_info->field_infos.find(field_id);
if (iter != load_field_data_info->field_infos.end()) {
PanicInfo(milvus::ErrorCode::FieldAlreadyExist,
ThrowInfo(milvus::ErrorCode::FieldAlreadyExist,
"append same field info multi times");
}
FieldBinlogInfo binlog_info;
@ -79,7 +79,7 @@ AppendLoadFieldDataPath(CLoadFieldDataInfo c_load_field_data_info,
static_cast<LoadFieldDataInfo*>(c_load_field_data_info);
auto iter = load_field_data_info->field_infos.find(field_id);
if (iter == load_field_data_info->field_infos.end()) {
PanicInfo(milvus::ErrorCode::FieldIDInvalid,
ThrowInfo(milvus::ErrorCode::FieldIDInvalid,
"please append field info first");
}
std::string file_path(c_file_path);

View File

@ -267,7 +267,7 @@ EstimateLoadIndexResource(CLoadIndexInfo c_load_index_info) {
load_index_info->enable_mmap);
return request;
} catch (std::exception& e) {
PanicInfo(milvus::UnexpectedError,
ThrowInfo(milvus::UnexpectedError,
fmt::format("failed to estimate index load resource, "
"encounter exception : {}",
e.what()));

View File

@ -19,7 +19,7 @@ struct Int64PKVisitor {
template <typename T>
int64_t
operator()(T t) const {
PanicInfo(Unsupported, "invalid int64 pk value");
ThrowInfo(Unsupported, "invalid int64 pk value");
}
};
@ -33,7 +33,7 @@ struct StrPKVisitor {
template <typename T>
std::string
operator()(T t) const {
PanicInfo(Unsupported, "invalid string pk value");
ThrowInfo(Unsupported, "invalid string pk value");
}
};

View File

@ -357,7 +357,7 @@ ReduceHelper::GetSearchResultDataSlice(int slice_index) {
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported primary key type {}", pk_type));
}
}
@ -405,7 +405,7 @@ ReduceHelper::GetSearchResultDataSlice(int slice_index) {
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported primary key type {}",
pk_type));
}
@ -440,7 +440,7 @@ ReduceHelper::GetSearchResultDataSlice(int slice_index) {
->set_element_type(
proto::schema::DataType(field_meta.get_element_type()));
} else if (field_meta.get_data_type() == DataType::VECTOR_ARRAY) {
PanicInfo(NotImplemented, "VECTOR_ARRAY is not implemented");
ThrowInfo(NotImplemented, "VECTOR_ARRAY is not implemented");
}
search_result_data->mutable_fields_data()->AddAllocated(
field_data.release());

View File

@ -155,7 +155,7 @@ StreamReducerHelper::AssembleMergedResult() {
->set_element_type(
proto::schema::DataType(field_meta.get_element_type()));
} else if (field_meta.get_data_type() == DataType::VECTOR_ARRAY) {
PanicInfo(NotImplemented, "VECTOR_ARRAY is not implemented");
ThrowInfo(NotImplemented, "VECTOR_ARRAY is not implemented");
}
new_merged_result->output_fields_data_[field_id] =
@ -579,7 +579,7 @@ StreamReducerHelper::GetSearchResultDataSlice(int slice_index) {
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported primary key type {}", pk_type));
}
}
@ -635,7 +635,7 @@ StreamReducerHelper::GetSearchResultDataSlice(int slice_index) {
break;
}
default: {
PanicInfo(DataTypeInvalid,
ThrowInfo(DataTypeInvalid,
fmt::format("unsupported primary key type {}",
pk_type));
}
@ -674,7 +674,7 @@ StreamReducerHelper::GetSearchResultDataSlice(int slice_index) {
->set_element_type(
proto::schema::DataType(field_meta.get_element_type()));
} else if (field_meta.get_data_type() == DataType::VECTOR_ARRAY) {
PanicInfo(NotImplemented, "VECTOR_ARRAY is not implemented");
ThrowInfo(NotImplemented, "VECTOR_ARRAY is not implemented");
}
search_result_data->mutable_fields_data()->AddAllocated(
field_data.release());

Some files were not shown because too many files have changed in this diff.