test: [E2e Refactor] update search basic tests and use a pk_name variable instead of hard-coded primary key field names (#41669)

related issue: https://github.com/milvus-io/milvus/issues/40698

---------

Signed-off-by: yanliang567 <yanliang.qiao@zilliz.com>
This commit is contained in:
yanliang567 2025-05-09 21:58:54 +08:00 committed by GitHub
parent d32b802752
commit ee659d50db
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
45 changed files with 2166 additions and 2589 deletions

View File

@ -1,3 +1,4 @@
import pandas.core.frame
from pymilvus.client.types import CompactionPlans
from pymilvus import Role
@ -209,7 +210,6 @@ class ResponseChecker:
collection = res
elif isinstance(res, tuple):
collection = res[0]
log.debug(collection.schema)
else:
raise Exception("The result to check isn't collection type object")
if len(check_items) == 0:
@ -394,6 +394,9 @@ class ResponseChecker:
expected: check the search is ok
"""
log.info("search_results_check: checking the searching results")
enable_milvus_client_api = check_items.get("enable_milvus_client_api", False)
pk_name = check_items.get("pk_name", ct.default_primary_field_name)
if func_name != 'search' and func_name != 'hybrid_search':
log.warning("The function name is {} rather than {} or {}".format(func_name, "search", "hybrid_search"))
if len(check_items) == 0:
@ -403,11 +406,12 @@ class ResponseChecker:
search_res.done()
search_res = search_res.result()
if check_items.get("output_fields", None):
assert set(search_res[0][0].entity.fields) == set(check_items["output_fields"])
log.info('search_results_check: Output fields of query searched is correct')
if check_items.get("original_entities", None):
original_entities = check_items["original_entities"][0]
pc.output_field_value_check(search_res, original_entities)
assert set(search_res[0][0].entity.fields.keys()) == set(check_items["output_fields"])
original_entities = check_items.get("original_entities", None)
if original_entities is not None:
if not isinstance(original_entities, pandas.core.frame.DataFrame):
original_entities = pandas.DataFrame(original_entities)
pc.output_field_value_check(search_res, original_entities, pk_name=pk_name)
if len(search_res) != check_items["nq"]:
log.error("search_results_check: Numbers of query searched (%d) "
"is not equal with expected (%d)"
@ -415,16 +419,14 @@ class ResponseChecker:
assert len(search_res) == check_items["nq"]
else:
log.info("search_results_check: Numbers of query searched is correct")
enable_milvus_client_api = check_items.get("enable_milvus_client_api", False)
# log.debug(search_res)
nq_i = 0
for hits in search_res:
searched_original_vectors = []
ids = []
distances = []
if enable_milvus_client_api:
for hit in hits:
ids.append(hit['id'])
ids.append(hit[pk_name])
distances.append(hit['distance'])
else:
ids = list(hits.ids)
@ -438,8 +440,7 @@ class ResponseChecker:
assert len(ids) == check_items["limit"]
else:
if check_items.get("ids", None) is not None:
ids_match = pc.list_contain_check(ids,
list(check_items["ids"]))
ids_match = pc.list_contain_check(ids, list(check_items["ids"]))
if not ids_match:
log.error("search_results_check: ids searched not match")
assert ids_match
@ -452,12 +453,6 @@ class ResponseChecker:
if check_items.get("vector_nq") is None or check_items.get("original_vectors") is None:
log.debug("skip distance check for knowhere does not return the precise distances")
else:
# for id in ids:
# searched_original_vectors.append(check_items["original_vectors"][id])
# cf.compare_distance_vector_and_vector_list(check_items["vector_nq"][nq_i],
# searched_original_vectors,
# check_items["metric"], distances)
# log.info("search_results_check: Checked the distances for one nq: OK")
pass
else:
pass # just check nq and topk, not specific ids need check
@ -544,10 +539,10 @@ class ResponseChecker:
raise Exception("No expect values found in the check task")
exp_res = check_items.get("exp_res", None)
with_vec = check_items.get("with_vec", False)
primary_field = check_items.get("primary_field", None)
pk_name = check_items.get("pk_name", ct.default_primary_field_name)
if exp_res is not None:
if isinstance(query_res, list):
assert pc.equal_entities_list(exp=exp_res, actual=query_res, primary_field=primary_field,
assert pc.equal_entities_list(exp=exp_res, actual=query_res, primary_field=pk_name,
with_vec=with_vec)
return True
else:
@ -575,8 +570,7 @@ class ResponseChecker:
log.info("search iteration finished, close")
query_iterator.close()
break
pk_name = ct.default_int64_field_name if res[0].get(ct.default_int64_field_name, None) is not None \
else ct.default_string_field_name
pk_name = check_items.get("pk_name", ct.default_primary_field_name)
for i in range(len(res)):
pk_list.append(res[i][pk_name])
if check_items.get("limit", None):

View File

@ -158,7 +158,7 @@ def entity_in(entity, entities, primary_field):
:param primary_field: collection primary field
:return: True or False
"""
primary_default = ct.default_int64_field_name
primary_default = ct.default_primary_field_name
primary_field = primary_default if primary_field is None else primary_field
primary_key = entity.get(primary_field, None)
primary_keys = []
@ -180,7 +180,7 @@ def remove_entity(entity, entities, primary_field):
:param primary_field: collection primary field
:return: entities of removed entity
"""
primary_default = ct.default_int64_field_name
primary_default = ct.default_primary_field_name
primary_field = primary_default if primary_field is None else primary_field
primary_key = entity.get(primary_field, None)
primary_keys = []
@ -226,16 +226,17 @@ def equal_entities_list(exp, actual, primary_field, with_vec=False):
return True if len(exp) == 0 else False
def output_field_value_check(search_res, original):
def output_field_value_check(search_res, original, pk_name):
"""
check if the value of output fields is correct, it only works on auto_id = False
:param search_res: the search result of specific output fields
:param original: the data in the collection
:return: True or False
"""
pk_name = ct.default_primary_field_name if pk_name is None else pk_name
limit = len(search_res[0])
for i in range(limit):
entity = search_res[0][i]['entity']
entity = search_res[0][i].fields
_id = search_res[0][i].id
for field in entity.keys():
if isinstance(entity[field], list):
@ -246,7 +247,7 @@ def output_field_value_check(search_res, original):
# but sparse only supports list data type insertion for now
assert entity[field].keys() == original[-1][_id].keys()
else:
num = original[original[ct.default_int64_field_name] == _id].index.to_list()[0]
num = original[original[pk_name] == _id].index.to_list()[0]
assert original[field][num] == entity[field]
return True

View File

@ -699,6 +699,7 @@ def gen_float_vec_field(name=ct.default_float_vec_field_name, is_primary=False,
description=description, dim=dim,
is_primary=is_primary, **kwargs)
else:
# no dim for sparse vector
float_vec_field, _ = ApiFieldSchemaWrapper().init_field_schema(name=name, dtype=DataType.SPARSE_FLOAT_VECTOR,
description=description,
is_primary=is_primary, **kwargs)
@ -1119,39 +1120,6 @@ def gen_schema_multi_string_fields(string_fields):
return schema
def gen_vectors(nb, dim, vector_data_type=DataType.FLOAT_VECTOR):
    """
    Generate `nb` random vectors of dimension `dim` for the given vector data type.

    :param nb: number of vectors to generate
    :param dim: vector dimension (not meaningful for text vectors)
    :param vector_data_type: a DataType vector type, or ct.text_sparse_vector
        for raw text entries
    :return: a list of vectors in the insert-ready format for that type
    :raises Exception: if vector_data_type is not a supported type
    """
    vectors = []
    if vector_data_type == DataType.FLOAT_VECTOR:
        vectors = [[random.random() for _ in range(dim)] for _ in range(nb)]
    elif vector_data_type == DataType.FLOAT16_VECTOR:
        vectors = gen_fp16_vectors(nb, dim)[1]
    elif vector_data_type == DataType.BFLOAT16_VECTOR:
        vectors = gen_bf16_vectors(nb, dim)[1]
    elif vector_data_type == DataType.SPARSE_FLOAT_VECTOR:
        vectors = gen_sparse_vectors(nb, dim)
    elif vector_data_type == ct.text_sparse_vector:
        vectors = gen_text_vectors(nb)
    elif vector_data_type == DataType.BINARY_VECTOR:
        vectors = gen_binary_vectors(nb, dim)[1]
    else:
        log.error(f"Invalid vector data type: {vector_data_type}")
        raise Exception(f"Invalid vector data type: {vector_data_type}")
    if dim > 1:
        # l2-normalize dense float vectors only; other formats are bytes/dict/text
        if vector_data_type == DataType.FLOAT_VECTOR:
            vectors = preprocessing.normalize(vectors, axis=1, norm='l2')
            vectors = vectors.tolist()
    return vectors
def gen_text_vectors(nb, language="en"):
    """
    Generate `nb` raw text strings used as text-vector input.

    :param nb: number of text entries to generate
    :param language: "en" (default) for English, "zh" for Chinese fake text
    :return: list of strings, each prefixed with " milvus "
    """
    fake = Faker("en_US")
    if language == "zh":
        fake = Faker("zh_CN")
    vectors = [" milvus " + fake.text() for _ in range(nb)]
    return vectors
def gen_string(nb):
    """
    Generate `nb` random string values, each a stringified float in [0, 1).

    :param nb: number of strings to generate
    :return: list of `nb` strings
    """
    string_values = [str(random.random()) for _ in range(nb)]
    return string_values
@ -3318,28 +3286,38 @@ def gen_sparse_vectors(nb, dim=1000, sparse_format="dok", empty_percentage=0):
return vectors
def gen_vectors(nb, dim, vector_data_type=DataType.FLOAT_VECTOR):
    """
    Generate `nb` random vectors of dimension `dim` for the given vector data type.

    Replaces the former gen_vectors_based_on_vector_type; the `nb` parameter
    names the vector count consistently with the other gen_* helpers.

    :param nb: number of vectors to generate
    :param dim: vector dimension (not meaningful for text vectors)
    :param vector_data_type: a DataType vector type, or ct.text_sparse_vector
        for raw text entries (Full Text Search)
    :return: a list of vectors in the insert-ready format for that type
    :raises Exception: if vector_data_type is not a supported type
    """
    vectors = []
    if vector_data_type == DataType.FLOAT_VECTOR:
        vectors = [[random.random() for _ in range(dim)] for _ in range(nb)]
    elif vector_data_type == DataType.FLOAT16_VECTOR:
        vectors = gen_fp16_vectors(nb, dim)[1]
    elif vector_data_type == DataType.BFLOAT16_VECTOR:
        vectors = gen_bf16_vectors(nb, dim)[1]
    elif vector_data_type == DataType.SPARSE_FLOAT_VECTOR:
        vectors = gen_sparse_vectors(nb, dim)
    elif vector_data_type == ct.text_sparse_vector:
        vectors = gen_text_vectors(nb)  # for Full Text Search
    elif vector_data_type == DataType.BINARY_VECTOR:
        vectors = gen_binary_vectors(nb, dim)[1]
    else:
        log.error(f"Invalid vector data type: {vector_data_type}")
        raise Exception(f"Invalid vector data type: {vector_data_type}")
    if dim > 1:
        # l2-normalize dense float vectors only; other formats are bytes/dict/text
        if vector_data_type == DataType.FLOAT_VECTOR:
            vectors = preprocessing.normalize(vectors, axis=1, norm='l2')
            vectors = vectors.tolist()
    return vectors
def gen_text_vectors(nb, language="en"):
    """
    Generate `nb` raw text strings used as text-vector input (Full Text Search).

    :param nb: number of text entries to generate
    :param language: "en" (default) for English, "zh" for Chinese fake text
    :return: list of strings, each prefixed with " milvus "
    """
    fake = Faker("en_US")
    if language == "zh":
        fake = Faker("zh_CN")
    vectors = [" milvus " + fake.text() for _ in range(nb)]
    return vectors
def field_types() -> dict:
    """
    Return all DataType enum members as a dict sorted by member name in
    reverse (descending) alphabetical order.

    :return: dict mapping member name -> DataType member
    """
    return dict(sorted(dict(DataType.__members__).items(), key=lambda item: item[0], reverse=True))

View File

@ -19,6 +19,7 @@ default_batch_size = 1000
min_limit = 1
max_limit = 16384
max_top_k = 16384
max_nq = 16384
max_partition_num = 1024
max_role_num = 10
default_partition_num = 16 # default num_partitions for partition key feature
@ -27,6 +28,7 @@ default_server_segment_row_limit = 1024 * 512
default_alias = "default"
default_user = "root"
default_password = "Milvus"
default_primary_field_name = 'pk'
default_bool_field_name = "bool"
default_int8_field_name = "int8"
default_int16_field_name = "int16"

View File

@ -411,13 +411,14 @@ class TestMilvusClientAliasValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
# 4. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.release_collection(client, collection_name)
self.drop_collection(client, collection_name, check_task=CheckTasks.err_res,
check_items={ct.err_code: 65535,

View File

@ -349,7 +349,8 @@ class TestMilvusClientCollectionValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip(reason="issue 25110")
@ -382,13 +383,14 @@ class TestMilvusClientCollectionValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"pk_name": default_primary_key_field_name,
"limit": default_limit})
# 4. query
self.query(client, collection_name, filter="id in [0, 1]",
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@ -419,6 +421,7 @@ class TestMilvusClientCollectionValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"pk_name": default_primary_key_field_name,
"limit": default_limit})
self.drop_collection(client, collection_name)
@ -452,6 +455,7 @@ class TestMilvusClientCollectionValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"pk_name": default_primary_key_field_name,
"limit": default_limit})
self.drop_collection(client, collection_name)
@ -487,13 +491,14 @@ class TestMilvusClientCollectionValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": limit})
"limit": limit,
"pk_name": default_primary_key_field_name})
# 5. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[delete_num:],
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@ -528,13 +533,14 @@ class TestMilvusClientCollectionValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": limit})
"limit": limit,
"pk_name": default_primary_key_field_name})
# 5. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[delete_num:],
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)

View File

@ -408,13 +408,14 @@ class TestMilvusClientDatabaseValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"pk_name": default_primary_key_field_name,
"limit": default_limit})
# 5. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
# 6. drop action
self.drop_collection(client, collection_name)
self.drop_database(client, db_name)
@ -463,13 +464,14 @@ class TestMilvusClientDatabaseValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"pk_name": default_primary_key_field_name,
"limit": default_limit})
# 5. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
# 6. drop action
self.drop_collection(client, collection_name)
self.drop_database(client, db_name)

View File

@ -144,7 +144,7 @@ class TestMilvusClientDeleteValid(TestMilvusClientV2Base):
expected: search/query successfully without deleted data
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
@ -167,6 +167,7 @@ class TestMilvusClientDeleteValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"pk_name": default_primary_key_field_name,
"ids": insert_ids,
"limit": limit})
# 5. query
@ -174,7 +175,7 @@ class TestMilvusClientDeleteValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[delete_num:],
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@ -185,7 +186,7 @@ class TestMilvusClientDeleteValid(TestMilvusClientV2Base):
expected: search/query successfully without deleted data
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
@ -209,13 +210,14 @@ class TestMilvusClientDeleteValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": limit})
# 5. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[delete_num:],
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@ -226,7 +228,7 @@ class TestMilvusClientDeleteValid(TestMilvusClientV2Base):
expected: search/query successfully without deleted data
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
@ -252,13 +254,14 @@ class TestMilvusClientDeleteValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": limit})
# 6. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[delete_num:],
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@ -282,7 +285,7 @@ class TestMilvusClientDeleteValid(TestMilvusClientV2Base):
expected: Delete and search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
json_field_name = "my_json"
schema = self.create_schema(client, enable_dynamic_field=enable_dynamic_field)[0]
@ -341,11 +344,12 @@ class TestMilvusClientDeleteValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": limit})
# 5. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[delete_num:],
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)

View File

@ -365,7 +365,8 @@ class TestMilvusClientHybridSearchValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@ -396,6 +397,7 @@ class TestMilvusClientHybridSearchValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
self.drop_collection(client, collection_name)
@ -472,6 +474,7 @@ class TestMilvusClientHybridSearchValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
sub_search1 = AnnSearchRequest(vectors_to_search, default_vector_field_name, {"level": 1}, 20,
expr=f"{json_field_name}['a']['b']>=10")
@ -484,5 +487,6 @@ class TestMilvusClientHybridSearchValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
self.drop_collection(client, collection_name)

View File

@ -273,13 +273,14 @@ class TestMilvusClientIndexValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
# 7. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@ -317,13 +318,14 @@ class TestMilvusClientIndexValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
# 4. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@ -395,13 +397,14 @@ class TestMilvusClientIndexValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
# 9. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@ -449,13 +452,14 @@ class TestMilvusClientIndexValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
# 7. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@ -511,13 +515,14 @@ class TestMilvusClientIndexValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
# 7. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
# 8. insert more distinct value to the scalar field to make the autoindex change
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
@ -534,7 +539,8 @@ class TestMilvusClientIndexValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@ -575,13 +581,14 @@ class TestMilvusClientIndexValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
# 7. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@ -623,13 +630,14 @@ class TestMilvusClientIndexValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
# 8. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)

View File

@ -370,13 +370,14 @@ class TestMilvusClientInsertValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
# 4. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.release_collection(client, collection_name)
self.drop_collection(client, collection_name)
@ -417,7 +418,8 @@ class TestMilvusClientInsertValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@ -443,6 +445,7 @@ class TestMilvusClientInsertValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": [],
"pk_name": default_primary_key_field_name,
"limit": 0})
self.drop_collection(client, collection_name)
@ -479,7 +482,8 @@ class TestMilvusClientInsertValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
# partition_number = self.get_partition_stats(client, collection_name, "_default")[0]
# assert partition_number == default_nb
# partition_number = self.get_partition_stats(client, collection_name, partition_name)[0]
@ -876,13 +880,14 @@ class TestMilvusClientUpsertValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
# 4. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.release_collection(client, collection_name)
self.drop_collection(client, collection_name)
@ -909,6 +914,7 @@ class TestMilvusClientUpsertValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": [],
"pk_name": default_primary_key_field_name,
"limit": 0})
self.drop_collection(client, collection_name)
@ -948,7 +954,8 @@ class TestMilvusClientUpsertValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
# partition_number = self.get_partition_stats(client, collection_name, "_default")[0]
# assert partition_number == default_nb
# partition_number = self.get_partition_stats(client, collection_name, partition_name)[0]
@ -996,7 +1003,8 @@ class TestMilvusClientUpsertValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
if self.has_partition(client, collection_name, partition_name)[0]:
self.release_partitions(client, collection_name, partition_name)
self.drop_partition(client, collection_name, partition_name)

View File

@ -212,6 +212,7 @@ class TestMilvusClientPartitionValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
# 4. query
res = self.query(client, collection_name, filter=default_search_exp,
@ -219,7 +220,7 @@ class TestMilvusClientPartitionValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})[0]
"pk_name": default_primary_key_field_name})[0]
assert set(res[0].keys()) == {"ids", "vector"}
partition_number = self.get_partition_stats(client, collection_name, "_default")[0]

View File

@ -57,7 +57,7 @@ class TestMilvusClientQueryInvalid(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
collections = self.list_collections(client)[0]
@ -103,7 +103,7 @@ class TestMilvusClientQueryValid(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
@ -116,13 +116,13 @@ class TestMilvusClientQueryValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
# 4. query using filter
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@ -133,7 +133,7 @@ class TestMilvusClientQueryValid(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
@ -146,7 +146,7 @@ class TestMilvusClientQueryValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
# 4. query using filter
res = self.query(client, collection_name, filter=default_search_exp,
output_fields=[default_primary_key_field_name, default_float_field_name,
@ -154,7 +154,7 @@ class TestMilvusClientQueryValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})[0]
"pk_name": default_primary_key_field_name})[0]
assert set(res[0].keys()) == {default_primary_key_field_name, default_vector_field_name,
default_float_field_name, default_string_field_name}
self.drop_collection(client, collection_name)
@ -167,7 +167,7 @@ class TestMilvusClientQueryValid(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
@ -180,14 +180,14 @@ class TestMilvusClientQueryValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
# 4. query using filter
res = self.query(client, collection_name, filter=default_search_exp,
output_fields=["*"],
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})[0]
"pk_name": default_primary_key_field_name})[0]
assert set(res[0].keys()) == {default_primary_key_field_name, default_vector_field_name,
default_float_field_name, default_string_field_name}
self.drop_collection(client, collection_name)
@ -200,7 +200,7 @@ class TestMilvusClientQueryValid(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
@ -215,14 +215,14 @@ class TestMilvusClientQueryValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[:limit],
"with_vec": True,
"primary_field": default_primary_key_field_name[:limit]})
"pk_name": default_primary_key_field_name[:limit]})
# 4. query using filter
self.query(client, collection_name, filter=default_search_exp,
limit=limit,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[:limit],
"with_vec": True,
"primary_field": default_primary_key_field_name[:limit]})
"pk_name": default_primary_key_field_name[:limit]})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@ -234,7 +234,7 @@ class TestMilvusClientQueryValid(TestMilvusClientV2Base):
expected: query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
@ -269,7 +269,7 @@ class TestMilvusClientQueryValid(TestMilvusClientV2Base):
expected: query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
@ -312,7 +312,7 @@ class TestMilvusClientGetInvalid(TestMilvusClientV2Base):
expected: search/query successfully without deleted data
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
@ -336,7 +336,7 @@ class TestMilvusClientGetInvalid(TestMilvusClientV2Base):
expected: search/query successfully without deleted data
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
@ -362,7 +362,7 @@ class TestMilvusClientGetInvalid(TestMilvusClientV2Base):
expected: search/query successfully without deleted data
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
@ -403,7 +403,7 @@ class TestMilvusClientGetValid(TestMilvusClientV2Base):
expected: search/query successfully without deleted data
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
@ -428,7 +428,7 @@ class TestMilvusClientGetValid(TestMilvusClientV2Base):
expected: search/query successfully without deleted data
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
@ -458,7 +458,7 @@ class TestMilvusClientGetValid(TestMilvusClientV2Base):
expected: search/query successfully without deleted data
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, id_type="string", max_length=ct.default_length)
# 2. insert
@ -485,7 +485,7 @@ class TestMilvusClientGetValid(TestMilvusClientV2Base):
expected: search/query successfully without deleted data
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, id_type="string", max_length=ct.default_length)
# 2. insert
@ -551,7 +551,7 @@ class TestMilvusClientQueryJsonPathIndex(TestMilvusClientV2Base):
with that without json path index
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
json_field_name = "json_field"
schema = self.create_schema(client, enable_dynamic_field=enable_dynamic_field)[0]

View File

@ -61,7 +61,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: Raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim)
# 2. search
@ -83,7 +83,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: Raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim)
# 2. search
@ -104,7 +104,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: Raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim)
# 2. search
@ -125,7 +125,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: Raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim)
# 2. search
@ -146,7 +146,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: Raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim)
# 2. search
@ -167,7 +167,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: Raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim)
# 2. search
@ -189,7 +189,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: Raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim)
# 2. search
@ -211,7 +211,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: Raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim)
# 2. search
@ -232,7 +232,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: Raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim)
# 2. search
@ -254,7 +254,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: Raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim)
# 2. search
@ -276,7 +276,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: Raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim)
# 2. search
@ -299,7 +299,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: Raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
error = {ct.err_code: 1, ct.err_msg: f"Param id_type must be int or string"}
self.create_collection(client, collection_name, default_dim, id_type="invalid",
@ -313,7 +313,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: Raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
error = {ct.err_code: 65535, ct.err_msg: f"type param(max_length) should be specified for the "
f"field({default_primary_key_field_name}) of collection {collection_name}"}
@ -329,7 +329,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
2. Report errors for creating collection with same name and different params
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim)
# 2. create collection with same params
@ -349,7 +349,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: Raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
error = {ct.err_code: 1100,
ct.err_msg: "float vector index does not support metric type: invalid: "
@ -366,7 +366,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: Raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim)
# 2. search
@ -389,7 +389,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
dim = 5
# 1. create collection
schema = self.create_schema(client, enable_dynamic_field=False)[0]
@ -408,7 +408,6 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
# 3. search
vectors_to_search = rng.random((1, dim))
null_expr = default_vector_field_name + " " + null_expr_op
log.info(null_expr)
error = {ct.err_code: 65535,
ct.err_msg: f"unsupported data type: VECTOR_FLOAT"}
self.search(client, collection_name, vectors_to_search,
@ -424,7 +423,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
dim = 5
# 1. create collection
schema = self.create_schema(client, enable_dynamic_field=False)[0]
@ -444,7 +443,6 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
vectors_to_search = rng.random((1, dim))
not_exist_field_name = "not_exist_field"
null_expr = not_exist_field_name + " " + null_expr_op
log.info(null_expr)
error = {ct.err_code: 1100,
ct.err_msg: f"failed to create query plan: cannot parse expression: "
f"{null_expr}, error: field {not_exist_field_name} not exist: invalid parameter"}
@ -462,7 +460,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
dim = 5
# 1. create collection
nullable_field_name = "nullable_field"
@ -486,7 +484,6 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
null_expr = nullable_field_name + "['b']" + " " + null_expr_op
self.insert(client, collection_name, rows)
# 3. search
log.info(null_expr)
self.search(client, collection_name, [vectors[0]],
filter=null_expr)
@ -500,7 +497,7 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
expected: raise exception
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
dim = 5
# 1. create collection
nullable_field_name = "nullable_field"
@ -524,7 +521,6 @@ class TestMilvusClientSearchInvalid(TestMilvusClientV2Base):
self.insert(client, collection_name, rows)
# 3. search
null_expr = nullable_field_name + "[0]" + " " + null_expr_op
log.info(null_expr)
error = {ct.err_code: 65535,
ct.err_msg: f"unsupported data type: ARRAY"}
self.search(client, collection_name, [vectors[0]],
@ -557,7 +553,7 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
self.using_database(client, "default")
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Bounded")
@ -583,13 +579,14 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
# 4. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.release_collection(client, collection_name)
self.drop_collection(client, collection_name)
@ -603,7 +600,7 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
expected: create collection with default schema, index, and load successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
dim = 128
# 1. create collection
schema = self.create_schema(client, enable_dynamic_field=False)[0]
@ -634,7 +631,7 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Bounded")
collections = self.list_collections(client)[0]
@ -662,13 +659,14 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
# 4. query
self.query(client, new_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.release_collection(client, new_name)
self.drop_collection(client, new_name)
@ -680,7 +678,7 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
collections = self.list_collections(client)[0]
@ -703,6 +701,7 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
@pytest.mark.tags(CaseLabel.L2)
@ -714,7 +713,7 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, id_type="string", max_length=ct.default_length)
self.describe_collection(client, collection_name,
@ -735,13 +734,14 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"pk_name": default_primary_key_field_name,
"limit": default_limit})
# 4. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@ -752,7 +752,7 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
expected: search successfully with limit(topK)
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, metric_type=metric_type, auto_id=auto_id,
consistency_level="Strong")
@ -772,6 +772,7 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"pk_name": default_primary_key_field_name,
"limit": default_limit})
self.drop_collection(client, collection_name)
@ -784,7 +785,7 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
expected: search successfully with limit(topK)
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, metric_type=metric_type, auto_id=auto_id,
consistency_level="Strong")
@ -805,6 +806,7 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"pk_name": default_primary_key_field_name,
"limit": default_limit})
self.drop_collection(client, collection_name)
@ -816,7 +818,7 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
expected: search/query successfully without deleted data
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
@ -840,13 +842,14 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": limit})
# 5. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[delete_num:],
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@ -857,7 +860,7 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
expected: search/query successfully without deleted data
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
@ -881,13 +884,14 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": limit})
# 5. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[delete_num:],
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@ -898,7 +902,7 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
expected: search successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
schema = self.create_schema(client, enable_dynamic_field=False)[0]
dim = 32
@ -951,7 +955,7 @@ class TestMilvusClientSearchValid(TestMilvusClientV2Base):
expected: raise error
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
schema = self.create_schema(client, enable_dynamic_field=False)[0]
dim = 5
@ -1023,7 +1027,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
dim = 5
# 1. create collection
nullable_field_name = "nullable_field"
@ -1049,7 +1053,6 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
vectors_to_search = rng.random((1, dim))
insert_ids = [str(i) for i in range(default_nb)]
null_expr = nullable_field_name + " " + null_expr_op
log.info(null_expr)
if nullable:
if "not" in null_expr or "NOT" in null_expr:
insert_ids = []
@ -1070,6 +1073,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": limit})
@pytest.mark.tags(CaseLabel.L2)
@ -1082,7 +1086,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
dim = 5
# 1. create collection
nullable_field_name = "nullable_field"
@ -1110,7 +1114,6 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
vectors_to_search = rng.random((1, dim))
insert_ids = [str(i) for i in range(default_nb)]
null_expr = nullable_field_name + " " + null_expr_op
log.info(null_expr)
if nullable:
if "not" in null_expr or "NOT" in null_expr:
insert_ids = []
@ -1131,6 +1134,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": limit})
@pytest.mark.tags(CaseLabel.L2)
@ -1143,7 +1147,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
dim = 5
# 1. create collection
nullable_field_name = "nullable_field"
@ -1171,7 +1175,6 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
vectors_to_search = rng.random((1, dim))
insert_ids = [str(i) for i in range(default_nb)]
null_expr = nullable_field_name + " " + null_expr_op
log.info(null_expr)
if nullable:
if "not" in null_expr or "NOT" in null_expr:
insert_ids = []
@ -1192,6 +1195,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": limit})
@pytest.mark.tags(CaseLabel.L2)
@ -1204,7 +1208,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
dim = 5
# 1. create collection
nullable_field_name = "nullable_field"
@ -1232,7 +1236,6 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
vectors_to_search = rng.random((1, dim))
insert_ids = [str(i) for i in range(default_nb)]
null_expr = nullable_field_name + " " + null_expr_op
log.info(null_expr)
if nullable:
if "not" in null_expr or "NOT" in null_expr:
insert_ids = []
@ -1253,6 +1256,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": limit})
@pytest.mark.tags(CaseLabel.L2)
@ -1265,7 +1269,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
dim = 5
# 1. create collection
nullable_field_name = "nullable_field"
@ -1291,7 +1295,6 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
vectors_to_search = rng.random((1, dim))
insert_ids = [str(i) for i in range(default_nb)]
null_expr = nullable_field_name + " " + null_expr_op
log.info(null_expr)
if nullable:
if "not" in null_expr or "NOT" in null_expr:
insert_ids = []
@ -1312,6 +1315,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": limit})
@pytest.mark.tags(CaseLabel.L2)
@ -1324,7 +1328,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
dim = 5
# 1. create collection
nullable_field_name = "nullable_field"
@ -1350,7 +1354,6 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
vectors_to_search = rng.random((1, dim))
insert_ids = [str(i) for i in range(default_nb)]
null_expr = nullable_field_name + " " + null_expr_op
log.info(null_expr)
if nullable:
if "not" in null_expr or "NOT" in null_expr:
insert_ids = []
@ -1371,6 +1374,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": limit})
@pytest.mark.tags(CaseLabel.L2)
@ -1383,7 +1387,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
dim = 5
# 1. create collection
nullable_field_name = "nullable_field"
@ -1409,7 +1413,6 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
vectors_to_search = rng.random((1, dim))
insert_ids = [str(i) for i in range(default_nb)]
null_expr = nullable_field_name + " " + null_expr_op
log.info(null_expr)
if nullable:
if "not" in null_expr or "NOT" in null_expr:
insert_ids = []
@ -1430,6 +1433,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": limit})
@pytest.mark.tags(CaseLabel.L2)
@ -1442,7 +1446,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
dim = 5
# 1. create collection
nullable_field_name = "nullable_field"
@ -1468,7 +1472,6 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
vectors_to_search = rng.random((1, dim))
insert_ids = [str(i) for i in range(default_nb)]
null_expr = nullable_field_name + " " + null_expr_op
log.info(null_expr)
if nullable:
if "not" in null_expr or "NOT" in null_expr:
insert_ids = []
@ -1489,6 +1492,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": limit})
@pytest.mark.tags(CaseLabel.L1)
@ -1501,7 +1505,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
dim = 5
# 1. create collection
nullable_field_name = "nullable_field"
@ -1534,7 +1538,6 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
vectors_to_search = rng.random((1, dim))
insert_ids = [str(i) for i in range(default_nb)]
null_expr = nullable_field_name + " " + null_expr_op
log.info(null_expr)
if nullable:
if "not" in null_expr or "NOT" in null_expr:
insert_ids = []
@ -1556,6 +1559,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": limit})
@pytest.mark.tags(CaseLabel.L1)
@ -1568,7 +1572,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
dim = 5
# 1. create collection
nullable_field_name = "nullable_field"
@ -1609,7 +1613,6 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
vectors_to_search = rng.random((1, dim))
insert_ids = [str(i) for i in range(default_nb)]
null_expr = nullable_field_name + " " + null_expr_op
log.info(null_expr)
if nullable:
if "not" in null_expr or "NOT" in null_expr:
insert_ids = []
@ -1631,6 +1634,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": limit})
@pytest.mark.tags(CaseLabel.L1)
@ -1643,7 +1647,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
expected: search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
dim = 5
# 1. create collection
nullable_field_name = "nullable_field"
@ -1670,7 +1674,6 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
vectors_to_search = rng.random((1, dim))
insert_ids = [str(i) for i in range(default_nb)]
null_expr = nullable_field_name + " " + null_expr_op
log.info(null_expr)
if nullable:
if "not" in null_expr or "NOT" in null_expr:
insert_ids = []
@ -1692,6 +1695,7 @@ class TestMilvusClientSearchNullExpr(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": limit})
@ -1732,7 +1736,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
expected: Search successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
json_field_name = "my_json"
schema = self.create_schema(client, enable_dynamic_field=enable_dynamic_field)[0]
@ -1812,6 +1816,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
# 6. search with filter on json without output_fields
expr = f"{json_field_name}['a']['b'] == {default_nb / 2}"
@ -1823,6 +1828,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": 1})[0]
expr = f"{json_field_name} == {default_nb + 5}"
insert_ids = [default_nb+5]
@ -1833,6 +1839,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": 1})
expr = f"{json_field_name}['a'][0] == 1"
insert_ids = [i for i in range(default_nb + 20, default_nb + 30)]
@ -1843,6 +1850,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
expr = f"{json_field_name}['a'][0]['b'] == 1"
insert_ids = [i for i in range(default_nb + 30, default_nb + 40)]
@ -1853,6 +1861,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
expr = f"{json_field_name}['a'] == 1"
insert_ids = [i for i in range(default_nb + 50, default_nb + 60)]
@ -1863,6 +1872,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
@pytest.mark.tags(CaseLabel.L2)
@ -1875,7 +1885,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
expected: successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
json_field_name = "my_json"
schema = self.create_schema(client, enable_dynamic_field=enable_dynamic_field)[0]
@ -1913,6 +1923,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": 1})
@pytest.mark.tags(CaseLabel.L2)
@ -1925,7 +1936,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
expected: successfully with original inverted index
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
schema = self.create_schema(client, enable_dynamic_field=False)[0]
schema.add_field(default_primary_key_field_name, DataType.INT64, is_primary=True, auto_id=False)
@ -1969,6 +1980,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
@pytest.mark.tags(CaseLabel.L1)
@ -1984,7 +1996,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
if enable_dynamic_field:
pytest.skip('need to fix the field name when enabling dynamic field')
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
json_field_name = "my_json"
schema = self.create_schema(client, enable_dynamic_field=enable_dynamic_field)[0]
@ -2034,6 +2046,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
@pytest.mark.tags(CaseLabel.L2)
@ -2048,7 +2061,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
expected: Search successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
json_field_name = "my_json"
schema = self.create_schema(client, enable_dynamic_field=enable_dynamic_field)[0]
@ -2102,6 +2115,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
expr = f"{json_field_name}1['a']['b'] >= 0"
vectors_to_search = [vectors[0]]
@ -2114,6 +2128,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
@pytest.mark.tags(CaseLabel.L1)
@ -2137,7 +2152,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
expected: Search successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
json_field_name = "my_json"
schema = self.create_schema(client, enable_dynamic_field=enable_dynamic_field)[0]
@ -2217,6 +2232,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": 1})
expr = f"{json_field_name} == {default_nb + 5}"
insert_ids = [default_nb + 5]
@ -2227,6 +2243,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": 1})
expr = f"{json_field_name}['a'][0] == 1"
insert_ids = [i for i in range(default_nb + 20, default_nb + 30)]
@ -2237,6 +2254,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
expr = f"{json_field_name}['a'][0]['b'] == 1"
insert_ids = [i for i in range(default_nb + 30, default_nb + 40)]
@ -2247,6 +2265,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
@pytest.mark.tags(CaseLabel.L2)
@ -2271,7 +2290,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
expected: Search successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
# 1. create collection
json_field_name = "my_json"
schema = self.create_schema(client, enable_dynamic_field=enable_dynamic_field)[0]
@ -2353,6 +2372,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": 1})
expr = f"{json_field_name} == {default_nb + 5}"
insert_ids = [default_nb + 5]
@ -2363,6 +2383,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": 1})
expr = f"{json_field_name}['a'][0] == 1"
insert_ids = [i for i in range(default_nb + 20, default_nb + 30)]
@ -2373,6 +2394,7 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})
expr = f"{json_field_name}['a'][0]['b'] == 1"
insert_ids = [i for i in range(default_nb + 30, default_nb + 40)]
@ -2383,4 +2405,5 @@ class TestMilvusClientSearchJsonPathIndex(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"pk_name": default_primary_key_field_name,
"limit": default_limit})

View File

@ -706,7 +706,9 @@ class TestMilvusClientSearchIteratorValid(TestMilvusClientV2Base):
res = self.search(client, collection_name, vectors_to_search,
search_params=search_params, limit=200,
check_task=CheckTasks.check_search_results,
check_items={"nq": 1, "limit": limit, "enable_milvus_client_api": True})[0]
check_items={"nq": 1, "limit": limit,
"enable_milvus_client_api": True,
"pk_name": default_primary_key_field_name})[0]
for limit in [batch_size - 3, batch_size, batch_size * 2, -1]:
if metric_type != "L2":
radius = res[0][limit // 2].get('distance', 0) - 0.1 # pick a radius to make sure there exists results
@ -967,7 +969,8 @@ class TestMilvusClientSearchIteratorValid(TestMilvusClientV2Base):
res = self.search(client, collection_name, vectors_to_search,
search_params=search_params, limit=limit,
check_task=CheckTasks.check_search_results,
check_items={"nq": 1, "limit": limit, "enable_milvus_client_api": True})[0]
check_items={"nq": 1, "limit": limit, "pk_name": default_primary_key_field_name,
"enable_milvus_client_api": True})[0]
for limit in [batch_size - 3, batch_size, batch_size * 2, -1]:
if metric_type != "L2":
radius = res[0][limit // 2].get('distance', 0) - 0.1 # pick a radius to make sure there exists results

View File

@ -106,6 +106,7 @@ class TestMilvusClientV2AliasOperation(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(search_vectors),
"pk_name": default_primary_key_field_name,
"limit": default_limit})
# 6. create collection2 with index and load
@ -135,6 +136,7 @@ class TestMilvusClientV2AliasOperation(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(search_vectors),
"pk_name": default_primary_key_field_name,
"limit": default_limit})
# 11. verify operations on collection1 still work

View File

@ -173,6 +173,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"pk_name": "id",
"limit": default_limit
}
)
@ -194,7 +195,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": bool_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -210,7 +211,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": int8_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -226,7 +227,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": int16_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -242,7 +243,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": int32_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -258,7 +259,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": int64_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -274,7 +275,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": float_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -290,7 +291,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": double_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -306,7 +307,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": varchar_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -322,7 +323,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": varchar_null_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -338,7 +339,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": json_null_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -354,7 +355,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": array_null_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -370,7 +371,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": multi_null_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -386,7 +387,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": mix_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -403,7 +404,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": int8_not_null_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -419,7 +420,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": int16_not_null_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -435,7 +436,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": float_not_null_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -451,7 +452,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": double_not_null_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -467,7 +468,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": varchar_not_null_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -483,7 +484,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": json_not_null_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -499,7 +500,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": array_not_null_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -515,7 +516,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": multi_not_null_expected,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -534,7 +535,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": complex_mix_expected1,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -552,7 +553,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": complex_mix_expected2,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)
@ -570,7 +571,7 @@ class TestMilvusClientE2E(TestMilvusClientV2Base):
check_items={
"exp_res": complex_mix_expected3,
"with_vec": True,
"primary_field": "id"
"pk_name": "id"
}
)

View File

@ -169,7 +169,7 @@ class TestCollectionHybridSearchValid(TestcaseBase):
metrics = []
search_res_dict_array = []
search_res_dict_array_nq = []
vectors = cf.gen_vectors_based_on_vector_type(nq, dim, vector_data_type)
vectors = cf.gen_vectors(nq, dim, vector_data_type)
# get hybrid search req list
for i in range(len(vector_name_list)):
@ -197,6 +197,7 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"pk_name": ct.default_int64_field_name,
"limit": default_limit})[0]
ids = search_res[0].ids
distance_array = search_res[0].distances
@ -216,7 +217,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
# 8. compare results through the re-calculated distances
for k in range(len(score_answer_nq)):
for i in range(len(score_answer_nq[k][:default_limit])):
@ -258,7 +260,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
@pytest.mark.tags(CaseLabel.L1)
def test_hybrid_search_normal_expr(self):
@ -292,7 +295,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
# 5. hybrid search
collection_w.hybrid_search(req_list, WeightedRanker(*weights), default_limit,
check_task=CheckTasks.check_search_results,
check_items={"nq": nq, "ids": insert_ids, "limit": default_limit})
check_items={"nq": nq, "ids": insert_ids, "limit": default_limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="issue 32288")
@ -410,14 +414,16 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
search_res = collection_w.search(vectors[:nq], search_field,
default_search_params, default_limit,
default_search_exp,
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
# 4. the effect of hybrid search to one field should equal to search
log.info("The distance list is:\n")
for i in range(nq):
@ -462,7 +468,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("primary_field", [ct.default_int64_field_name, ct.default_string_field_name])
@ -519,7 +526,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("primary_field", [ct.default_int64_field_name, ct.default_string_field_name])
@ -560,12 +568,14 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
hybrid_search_1 = collection_w.hybrid_search(req_list, WeightedRanker(0.1, 0.9), default_limit,
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
for i in range(nq):
assert hybrid_search_0[i].ids == hybrid_search_1[i].ids
assert hybrid_search_0[i].distances == hybrid_search_1[i].distances
@ -614,7 +624,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
for k in range(nq):
id_list_nq[k].extend(search_res[k].ids)
# 5. prepare hybrid search params
@ -672,7 +683,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("primary_field", [ct.default_int64_field_name, ct.default_string_field_name])
@ -716,7 +728,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": min_dim})[0]
"limit": min_dim,
"pk_name": ct.default_int64_field_name})[0]
id_list.extend(search_res[0].ids)
# 4. hybrid search
hybrid_search = collection_w.hybrid_search(req_list, WeightedRanker(0.1, 0.9), default_limit)[0]
@ -760,7 +773,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("primary_field", [ct.default_int64_field_name, ct.default_string_field_name])
@ -802,7 +816,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("primary_field", [ct.default_int64_field_name, ct.default_string_field_name])
@ -841,7 +856,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("primary_field", [ct.default_int64_field_name, ct.default_string_field_name])
@ -880,7 +896,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("primary_field", [ct.default_int64_field_name, ct.default_string_field_name])
@ -922,7 +939,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("primary_field", [ct.default_int64_field_name, ct.default_string_field_name])
@ -961,7 +979,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip("issue: #29840")
@ -1000,7 +1019,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": ct.default_int64_field_name})
# 5. hybrid search with two-dim list in WeightedRanker
weights = [[random.random() for _ in range(1)] for _ in range(len(req_list))]
# 4. hybrid search
@ -1008,7 +1028,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
def test_hybrid_search_over_maximum_reqs_num(self):
@ -1089,7 +1110,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("primary_field", [ct.default_int64_field_name, ct.default_string_field_name])
@ -1130,7 +1152,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
ids = search_res[0].ids
for j in range(len(ids)):
search_res_dict[ids[j]] = 1 / (j + 60 + 1)
@ -1142,7 +1165,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
# 6. compare results through the re-calculated distances
for i in range(len(score_answer[:default_limit])):
assert score_answer[i] - hybrid_search_0[0].distances[i] < hybrid_search_epsilon
@ -1151,7 +1175,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
assert hybrid_search_0[0].ids == hybrid_search_1[0].ids
assert hybrid_search_0[0].distances == hybrid_search_1[0].distances
@ -1198,7 +1223,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
ids = search_res[0].ids
for j in range(len(ids)):
search_res_dict[ids[j]] = 1 / (j + k + 1)
@ -1211,7 +1237,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
# 6. compare results through the re-calculated distances
for i in range(len(score_answer[:default_limit])):
assert score_answer[i] - hybrid_res[0].distances[i] < hybrid_search_epsilon
@ -1257,7 +1284,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
# 5. hybrid search with offset parameter
req_list = []
for i in range(len(vector_name_list)):
@ -1274,7 +1302,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit - offset})[0]
"limit": default_limit - offset,
"pk_name": ct.default_int64_field_name})[0]
assert hybrid_res_inside[0].distances[offset:] == hybrid_res[0].distances
@ -1336,7 +1365,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
ids = search_res[0].ids
for j in range(len(ids)):
search_res_dict[ids[j]] = 1 / (j + k + 1)
@ -1348,7 +1378,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
# 6. compare results through the re-calculated distances
for i in range(len(score_answer[:default_limit])):
delta = math.fabs(score_answer[i] - hybrid_res[0].distances[i])
@ -1396,7 +1427,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": limit})[0]
"limit": limit,
"pk_name": ct.default_int64_field_name})[0]
ids = search_res[0].ids
distance_array = search_res[0].distances
for j in range(len(ids)):
@ -1410,7 +1442,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": limit})[0]
"limit": limit,
"pk_name": ct.default_int64_field_name})[0]
# 6. compare results through the re-calculated distances
for i in range(len(score_answer[:limit])):
delta = math.fabs(score_answer[i] - hybrid_res[0].distances[i])
@ -1515,7 +1548,7 @@ class TestCollectionHybridSearchValid(TestcaseBase):
metrics = []
search_res_dict_array = []
search_res_dict_array_nq = []
vectors = cf.gen_vectors_based_on_vector_type(nq, dim, vector_data_type)
vectors = cf.gen_vectors(nq, dim, vector_data_type)
# get hybrid search req list
for i in range(len(vector_name_list)):
@ -1543,7 +1576,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
ids = search_res[0].ids
distance_array = search_res[0].distances
for j in range(len(ids)):
@ -1563,7 +1597,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
# 8. compare results through the re-calculated distances
for k in range(len(score_answer_nq)):
for i in range(len(score_answer_nq[k][:default_limit])):
@ -1596,7 +1631,7 @@ class TestCollectionHybridSearchValid(TestcaseBase):
metrics = []
search_res_dict_array = []
search_res_dict_array_nq = []
vectors = cf.gen_vectors_based_on_vector_type(nq, dim, vector_data_type)
vectors = cf.gen_vectors(nq, dim, vector_data_type)
# get hybrid search req list
for i in range(len(vector_name_list)):
@ -1624,7 +1659,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
ids = search_res[0].ids
distance_array = search_res[0].distances
for j in range(len(ids)):
@ -1646,7 +1682,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
# 8. compare results through the re-calculated distances
for k in range(len(score_answer_nq)):
for i in range(len(score_answer_nq[k][:default_limit])):
@ -1679,7 +1716,7 @@ class TestCollectionHybridSearchValid(TestcaseBase):
metrics = []
search_res_dict_array = []
search_res_dict_array_nq = []
vectors = cf.gen_vectors_based_on_vector_type(nq, dim, vector_data_type)
vectors = cf.gen_vectors(nq, dim, vector_data_type)
# get hybrid search req list
for i in range(len(vector_name_list)):
@ -1707,7 +1744,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
ids = search_res[0].ids
distance_array = search_res[0].distances
for j in range(len(ids)):
@ -1726,7 +1764,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
# 8. compare results through the re-calculated distances
for k in range(len(score_answer_nq)):
for i in range(len(score_answer_nq[k][:default_limit])):
@ -1786,6 +1825,7 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit,
"pk_name": ct.default_int64_field_name,
"_async": _async})[0]
if _async:
search_res.done()
@ -1809,7 +1849,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit,
"_async": _async})[0]
"_async": _async,
"pk_name": ct.default_int64_field_name})[0]
if _async:
hybrid_res.done()
hybrid_res = hybrid_res.result()
@ -1880,7 +1921,7 @@ class TestCollectionHybridSearchValid(TestcaseBase):
metrics = []
search_res_dict_array = []
search_res_dict_array_nq = []
vectors = cf.gen_vectors_based_on_vector_type(nq, default_dim, vector_data_type)
vectors = cf.gen_vectors(nq, default_dim, vector_data_type)
# get hybrid search req list
for i in range(len(vector_name_list)):
@ -1908,7 +1949,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
ids = search_res[0].ids
distance_array = search_res[0].distances
for j in range(len(ids)):
@ -1926,7 +1968,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
# 8. compare results through the re-calculated distances
for k in range(len(score_answer_nq)):
for i in range(len(score_answer_nq[k][:default_limit])):
@ -2050,7 +2093,8 @@ class TestCollectionHybridSearchValid(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": 1,
"ids": insert_ids,
"limit": default_limit})[0]
"limit": default_limit,
"pk_name": ct.default_int64_field_name})[0]
# 6. compare results through the re-calculated distances
for i in range(len(score_answer[:default_limit])):
delta = math.fabs(score_answer[i] - hybrid_res[0].distances[i])

View File

@ -254,6 +254,7 @@ class TestCollectionRangeSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"pk_name": ct.default_int64_field_name,
"limit": default_limit})[0]
log.info("test_range_search_normal: checking the distance of top 1")
for hits in search_res:
@ -308,6 +309,7 @@ class TestCollectionRangeSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"pk_name": ct.default_int64_field_name,
"limit": default_limit})
# 4. range search with IP
range_search_params = {"metric_type": "IP",
@ -376,6 +378,7 @@ class TestCollectionRangeSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"pk_name": ct.default_int64_field_name,
"limit": default_limit})
# 4. range search with IP
range_search_params = {"metric_type": "IP",
@ -417,6 +420,7 @@ class TestCollectionRangeSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"pk_name": ct.default_int64_field_name,
"limit": default_limit,
"_async": _async})[0]
if _async:
@ -477,7 +481,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_items={
"nq": 1,
"limit": limit,
"ids": list(distances_index_max)
"ids": list(distances_index_max),
"pk_name": ct.default_int64_field_name,
})
@pytest.mark.tags(CaseLabel.L2)
@ -533,6 +538,7 @@ class TestCollectionRangeSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"pk_name": ct.default_int64_field_name,
"limit": limit,
"_async": _async})
# 3. delete partitions
@ -559,7 +565,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_items={"nq": nq,
"ids": insert_ids[:entity_num],
"limit": limit - deleted_entity_num,
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
def test_range_search_collection_after_release_load(self, _async):
@ -599,7 +606,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": default_limit,
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
def test_range_search_load_flush_load(self, _async):
@ -637,7 +645,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": default_limit,
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
def test_range_search_new_data(self, nq):
@ -668,7 +677,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": nb_old})
"limit": nb_old,
"pk_name": ct.default_int64_field_name})
# 3. insert new data
nb_new = 300
_, _, _, insert_ids_new, time_stamp = cf.insert_data(collection_w, nb_new, dim=dim,
@ -685,7 +695,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": nb_old + nb_new})
"limit": nb_old + nb_new,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
def test_range_search_different_data_distribution_with_index(self, _async):
@ -723,7 +734,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"limit": default_limit,
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip("not fixed yet")
@ -763,7 +775,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"limit": default_limit,
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("index", range_search_supported_indexes)
@ -802,7 +815,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("index", range_search_supported_indexes)
@ -841,7 +855,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
def test_range_search_index_one_partition(self, _async):
@ -881,7 +896,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids[par[0].num_entities:],
"limit": limit_check,
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("index", ["BIN_FLAT", "BIN_IVF_FLAT"])
@ -917,7 +933,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_items={"nq": nq,
"ids": insert_ids,
"limit": 2,
"_async": _async})[0]
"_async": _async,
"pk_name": ct.default_int64_field_name})[0]
if _async:
res.done()
res = res.result()
@ -952,7 +969,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": [],
"limit": 0})
"limit": 0,
"pk_name": ct.default_int64_field_name})
# 5. range search
search_params = {"metric_type": "JACCARD", "params": {"nprobe": 10, "radius": 10,
"range_filter": 2}}
@ -961,7 +979,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": [],
"limit": 0})
"limit": 0,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("index", ["BIN_FLAT", "BIN_IVF_FLAT"])
@ -996,7 +1015,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_items={"nq": nq,
"ids": insert_ids,
"limit": 2,
"_async": _async})[0]
"_async": _async,
"pk_name": ct.default_int64_field_name})[0]
if _async:
res.done()
res = res.result()
@ -1031,7 +1051,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": [],
"limit": 0})
"limit": 0,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip("tanimoto obsolete")
@ -1086,7 +1107,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_items={"nq": 1,
"ids": insert_ids,
"limit": limit,
"_async": _async})[0]
"_async": _async,
"pk_name": ct.default_int64_field_name})[0]
if _async:
res.done()
res = res.result()
@ -1122,7 +1144,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": [],
"limit": 0})
"limit": 0,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
def test_range_search_binary_without_flush(self, metrics):
@ -1155,7 +1178,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L1)
def test_range_search_with_expression(self, enable_dynamic_field):
@ -1202,7 +1226,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": min(nb, len(filter_ids)),
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})
if _async:
search_res.done()
search_res = search_res.result()
@ -1221,7 +1246,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": min(nb, len(filter_ids)),
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})
if _async:
search_res.done()
search_res = search_res.result()
@ -1255,7 +1281,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": default_limit,
"_async": _async})[0]
"_async": _async,
"pk_name": ct.default_int64_field_name})[0]
if _async:
res.done()
res = res.result()
@ -1290,7 +1317,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit,
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})
# 2. search with multi-processes
log.info("test_range_search_concurrent_multi_threads: searching with %s processes" % threads_num)
@ -1408,7 +1436,7 @@ class TestCollectionRangeSearch(TestcaseBase):
"ids": insert_ids,
"limit": nb_old,
"_async": _async,
})
"pk_name": ct.default_int64_field_name})
kwargs = {}
consistency_level = kwargs.get(
@ -1454,7 +1482,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_items={"nq": nq,
"ids": insert_ids,
"limit": nb_old,
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})
nb_new = 400
_, _, _, insert_ids_new, _ = cf.insert_data(collection_w, nb_new,
@ -1472,7 +1501,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_items={"nq": nq,
"ids": insert_ids,
"limit": nb_old + nb_new,
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
def test_range_search_with_consistency_eventually(self, nq, _async):
@ -1501,7 +1531,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_items={"nq": nq,
"ids": insert_ids,
"limit": nb_old,
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})
nb_new = 400
_, _, _, insert_ids_new, _ = cf.insert_data(collection_w, nb_new,
auto_id=auto_id, dim=dim,
@ -1542,7 +1573,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_items={"nq": nq,
"ids": insert_ids,
"limit": nb_old,
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})
kwargs = {}
consistency_level = kwargs.get(
@ -1562,7 +1594,8 @@ class TestCollectionRangeSearch(TestcaseBase):
check_items={"nq": nq,
"ids": insert_ids,
"limit": nb_old + nb_new,
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
def test_range_search_sparse(self):

View File

@ -144,6 +144,7 @@ class TestSearchDiskann(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": default_limit,
"pk_name": ct.default_int64_field_name,
"_async": _async}
)
@ -177,7 +178,8 @@ class TestSearchDiskann(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": limit,
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
def test_search_invalid_params_with_diskann_B(self):
@ -245,7 +247,8 @@ class TestSearchDiskann(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": default_limit}
"limit": default_limit,
"pk_name": ct.default_int64_field_name}
)
@pytest.mark.tags(CaseLabel.L2)
@ -294,7 +297,8 @@ class TestSearchDiskann(TestcaseBase):
check_items={"nq": default_nq,
"ids": ids,
"limit": default_limit,
"_async": _async}
"_async": _async,
"pk_name": ct.default_int64_field_name}
)
@pytest.mark.tags(CaseLabel.L2)
@ -345,7 +349,8 @@ class TestSearchDiskann(TestcaseBase):
check_items={"nq": default_nq,
"ids": ids,
"limit": default_limit,
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L1)
def test_search_with_scalar_field(self, _async):
@ -390,7 +395,8 @@ class TestSearchDiskann(TestcaseBase):
check_items={"nq": default_nq,
"ids": ids,
"limit": limit,
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("limit", [10, 100, 1000])
@ -431,7 +437,8 @@ class TestSearchDiskann(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": limit,
"_async": _async}
"_async": _async,
"pk_name": ct.default_int64_field_name}
)
@pytest.mark.tags(CaseLabel.L2)
@ -472,4 +479,5 @@ class TestSearchDiskann(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": default_limit,
"_async": _async})
"_async": _async,
"pk_name": ct.default_int64_field_name})

View File

@ -737,7 +737,7 @@ class TestCollectionSearchInvalid(TestcaseBase):
% collection_w.name)
# err_msg = "collection" + collection_w.name + "was not loaded into memory"
err_msg = "collection not loaded"
vectors = cf.gen_vectors_based_on_vector_type(default_nq, default_dim, vector_data_type)
vectors = cf.gen_vectors(default_nq, default_dim, vector_data_type)
collection_w.search(vectors[:default_nq], default_search_field, default_search_params,
default_limit, default_search_exp, timeout=1,
check_task=CheckTasks.err_res,

View File

@ -178,7 +178,8 @@ class TestCollectionSearchJSON(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
def test_search_json_nullable_load_before_insert(self, nq, is_flush, enable_dynamic_field):
@ -204,7 +205,8 @@ class TestCollectionSearchJSON(TestcaseBase):
default_search_params, default_limit,
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"limit": default_limit})
"limit": default_limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="issue 37113")
@ -233,7 +235,8 @@ class TestCollectionSearchJSON(TestcaseBase):
default_search_params, default_limit,
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"limit": default_limit})
"limit": default_limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L1)
def test_search_expression_json_contains(self, enable_dynamic_field):
@ -270,7 +273,8 @@ class TestCollectionSearchJSON(TestcaseBase):
default_search_params, default_limit, expression,
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"limit": 3})
"limit": 3,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
def test_search_expression_json_contains_list(self, auto_id):
@ -308,7 +312,8 @@ class TestCollectionSearchJSON(TestcaseBase):
default_search_params, limit, expression,
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"limit": limit})
"limit": limit,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L2)
def test_search_expression_json_contains_combined_with_normal(self, enable_dynamic_field):
@ -347,7 +352,8 @@ class TestCollectionSearchJSON(TestcaseBase):
default_search_params, limit, expression,
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"limit": limit // 2})
"limit": limit // 2,
"pk_name": ct.default_int64_field_name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("expr_prefix", ["array_contains", "ARRAY_CONTAINS"])

View File

@ -165,7 +165,7 @@ class TestCollectionSearchNoneAndDefaultData(TestcaseBase):
vector_data_type=vector_data_type,
nullable_fields={ct.default_float_field_name: null_data_percent})[0:5]
# 2. generate search data
vectors = cf.gen_vectors_based_on_vector_type(nq, dim, vector_data_type)
vectors = cf.gen_vectors(nq, dim, vector_data_type)
# 3. search after insert
collection_w.search(vectors[:nq], default_search_field,
default_search_params, default_limit,
@ -176,6 +176,7 @@ class TestCollectionSearchNoneAndDefaultData(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"pk_name": ct.default_int64_field_name,
"limit": default_limit,
"output_fields": [default_int64_field_name,
default_float_field_name]})
@ -233,6 +234,7 @@ class TestCollectionSearchNoneAndDefaultData(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": limit,
"pk_name": ct.default_int64_field_name,
"_async": _async,
"output_fields": [ct.default_string_field_name,
ct.default_float_field_name]})
@ -251,7 +253,7 @@ class TestCollectionSearchNoneAndDefaultData(TestcaseBase):
vector_data_type=vector_data_type,
default_value_fields={ct.default_float_field_name: np.float32(10.0)})[0:5]
# 2. generate search data
vectors = cf.gen_vectors_based_on_vector_type(nq, dim, vector_data_type)
vectors = cf.gen_vectors(nq, dim, vector_data_type)
# 3. search after insert
collection_w.search(vectors[:nq], default_search_field,
default_search_params, default_limit,
@ -262,6 +264,7 @@ class TestCollectionSearchNoneAndDefaultData(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"pk_name": ct.default_int64_field_name,
"limit": default_limit,
"output_fields": [default_int64_field_name,
default_float_field_name]})
@ -345,6 +348,7 @@ class TestCollectionSearchNoneAndDefaultData(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"pk_name": ct.default_int64_field_name,
"limit": limit,
"_async": _async,
"output_fields": output_fields})
@ -365,7 +369,7 @@ class TestCollectionSearchNoneAndDefaultData(TestcaseBase):
nullable_fields={ct.default_float_field_name: 1},
default_value_fields={ct.default_float_field_name: np.float32(10.0)})[0:5]
# 2. generate search data
vectors = cf.gen_vectors_based_on_vector_type(nq, dim, vector_data_type)
vectors = cf.gen_vectors(nq, dim, vector_data_type)
# 3. search after insert
collection_w.search(vectors[:nq], default_search_field,
default_search_params, default_limit,
@ -376,6 +380,7 @@ class TestCollectionSearchNoneAndDefaultData(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"pk_name": ct.default_int64_field_name,
"limit": default_limit,
"output_fields": [default_int64_field_name,
default_float_field_name]})
@ -410,6 +415,7 @@ class TestCollectionSearchNoneAndDefaultData(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
"pk_name": ct.default_int64_field_name,
"limit": default_limit,
"_async": _async,
"output_fields": [ct.default_float_field_name,
@ -458,6 +464,7 @@ class TestCollectionSearchNoneAndDefaultData(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": limit,
"pk_name": ct.default_int64_field_name,
"_async": _async,
"output_fields": [ct.default_string_field_name,
ct.default_float_field_name]})
@ -503,7 +510,7 @@ class TestCollectionSearchNoneAndDefaultData(TestcaseBase):
loaded_fields.append(default_float_field_name)
collection_w.load(load_fields=loaded_fields)
# 3. generate search data
vectors = cf.gen_vectors_based_on_vector_type(default_nq, default_dim)
vectors = cf.gen_vectors(default_nq, default_dim)
# 4. search after partial load field with None data
output_fields = [default_int64_field_name, default_float_field_name]
collection_w.search(vectors[:default_nq], default_search_field,
@ -513,6 +520,7 @@ class TestCollectionSearchNoneAndDefaultData(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"pk_name": ct.default_int64_field_name,
"limit": default_limit,
"output_fields": output_fields})
@ -536,7 +544,7 @@ class TestCollectionSearchNoneAndDefaultData(TestcaseBase):
nullable_fields={ct.default_float_field_name: 0.5})[0:5]
collection_name = collection_w.name
# 2. generate search data
vectors = cf.gen_vectors_based_on_vector_type(default_nq, default_dim)
vectors = cf.gen_vectors(default_nq, default_dim)
# 3. search with expr "nullableFid == 0"
search_exp = f"{ct.default_float_field_name} == 0"
output_fields = [default_int64_field_name, default_float_field_name]
@ -548,6 +556,7 @@ class TestCollectionSearchNoneAndDefaultData(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": 1,
"pk_name": ct.default_int64_field_name,
"output_fields": output_fields})
# 4. drop collection
collection_w.drop()

View File

@ -83,7 +83,7 @@ class TestMilvusClientSearchPagination(TestMilvusClientV2Base):
client = self._client()
# Create collection
collection_schema = self.create_schema(client, enable_dynamic_field=self.enable_dynamic_field)[0]
collection_schema = self.create_schema(client)[0]
collection_schema.add_field(default_primary_key_field_name, DataType.INT64, is_primary=True, auto_id=False)
collection_schema.add_field(self.float_vector_field_name, DataType.FLOAT_VECTOR, dim=128)
collection_schema.add_field(self.bfloat16_vector_field_name, DataType.BFLOAT16_VECTOR, dim=200)
@ -92,7 +92,8 @@ class TestMilvusClientSearchPagination(TestMilvusClientV2Base):
collection_schema.add_field(default_float_field_name, DataType.FLOAT)
collection_schema.add_field(default_string_field_name, DataType.VARCHAR, max_length=256)
collection_schema.add_field(default_int64_field_name, DataType.INT64)
self.create_collection(client, self.collection_name, schema=collection_schema, force_teardown=False)
self.create_collection(client, self.collection_name, schema=collection_schema,
enable_dynamic_field=self.enable_dynamic_field, force_teardown=False)
for partition_name in self.partition_names:
self.create_partition(client, self.collection_name, partition_name=partition_name)
@ -167,7 +168,7 @@ class TestMilvusClientSearchPagination(TestMilvusClientV2Base):
metric_type="JACCARD",
index_type="BIN_IVF_FLAT",
params={"nlist": 128})
self.create_index(client, self.collection_name, index_params=index_params)
self.create_index(client, self.collection_name, index_params=index_params, timeout=300)
# Load collection
self.load_collection(client, self.collection_name)
@ -210,9 +211,8 @@ class TestMilvusClientSearchPagination(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": default_nq,
"limit": limit,
"metric": "COSINE",
"vector_nq": vectors_to_search[:default_nq],
"original_vectors": [self.datas[i][self.float_vector_field_name] for i in range(len(self.datas))]
"pk_name": default_primary_key_field_name,
"metric": "COSINE"
}
)
all_pages_results.append(search_res_with_offset)
@ -268,7 +268,8 @@ class TestMilvusClientSearchPagination(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": default_nq,
"limit": limit
"limit": limit,
"pk_name": default_primary_key_field_name
}
)
all_pages_results.append(search_res_with_offset)
@ -325,7 +326,8 @@ class TestMilvusClientSearchPagination(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": default_nq,
"limit": limit
"limit": limit,
"pk_name": default_primary_key_field_name
}
)
all_pages_results.append(search_res_with_offset)
@ -381,7 +383,8 @@ class TestMilvusClientSearchPagination(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": default_nq,
"limit": limit
"limit": limit,
"pk_name": default_primary_key_field_name
}
)
all_pages_results.append(search_res_with_offset)
@ -435,7 +438,8 @@ class TestMilvusClientSearchPagination(TestMilvusClientV2Base):
search_params=search_param, limit=limit, check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": default_nq,
"limit": limit})
"limit": limit,
"pk_name": default_primary_key_field_name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("offset", [0, 100])
@ -482,7 +486,8 @@ class TestMilvusClientSearchPagination(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": default_nq,
"limit": limit}
"limit": limit,
"pk_name": default_primary_key_field_name}
)
# 4. search with offset+limit
@ -523,7 +528,8 @@ class TestMilvusClientSearchPagination(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": default_nq,
"limit": limit}
"limit": limit,
"pk_name": default_primary_key_field_name}
)
# 7. search with offset+limit
@ -561,8 +567,8 @@ class TestMilvusClientSearchPagination(TestMilvusClientV2Base):
collection_name = self.collection_name
vectors_to_search = cf.gen_vectors(default_nq, self.float_vector_dim)
# search with pagination in partition_1
limit = 50
pages = 10
limit = 20
pages = 5
for page in range(pages):
offset = page * limit
search_params = {"offset": offset}
@ -576,7 +582,9 @@ class TestMilvusClientSearchPagination(TestMilvusClientV2Base):
limit=limit,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": default_nq, "limit": limit})
"nq": default_nq,
"limit": limit,
"pk_name": default_primary_key_field_name})
# assert every id in search_res_with_offset %3 ==1
for hits in search_res_with_offset:
@ -597,7 +605,9 @@ class TestMilvusClientSearchPagination(TestMilvusClientV2Base):
limit=limit,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": default_nq, "limit": limit})
"nq": default_nq,
"limit": limit,
"pk_name": default_primary_key_field_name})
# assert every id in search_res_with_offset %3 ==1 or ==2
for hits in search_res_with_offset:
@ -623,7 +633,9 @@ class TestMilvusClientSearchPagination(TestMilvusClientV2Base):
search_params=search_params, limit=default_limit,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": default_nq, "limit": default_limit})
"nq": default_nq,
"limit": default_limit,
"pk_name": default_primary_key_field_name})
# search with offset = 0
offset = 0
search_params = {"offset": offset}
@ -632,7 +644,9 @@ class TestMilvusClientSearchPagination(TestMilvusClientV2Base):
search_params=search_params, limit=default_limit,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": default_nq, "limit": default_limit})
"nq": default_nq,
"limit": default_limit,
"pk_name": default_primary_key_field_name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("offset", [0, 20, 100, 200])
@ -655,7 +669,9 @@ class TestMilvusClientSearchPagination(TestMilvusClientV2Base):
limit=limit,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": default_nq, "limit": limit})
"nq": default_nq,
"limit": limit,
"pk_name": default_primary_key_field_name})
# 2. search with offset in search
search_params = {}
@ -666,7 +682,9 @@ class TestMilvusClientSearchPagination(TestMilvusClientV2Base):
limit=limit,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": default_nq, "limit": limit})
"nq": default_nq,
"limit": limit,
"pk_name": default_primary_key_field_name})
# 3. compare results
assert res1 == res2
@ -769,7 +787,7 @@ class TestSearchPaginationIndependent(TestMilvusClientV2Base):
"nq": default_nq,
"limit": limit,
"metric": metric_type,
}
"pk_name": default_primary_key_field_name}
)
all_pages_results.append(search_res_with_offset)
@ -805,6 +823,7 @@ class TestSearchPaginationIndependent(TestMilvusClientV2Base):
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.tags(CaseLabel.GPU)
@pytest.mark.parametrize('vector_dtype', ct.all_dense_vector_types)
@pytest.mark.parametrize('index', ct.all_index_types[:7])
@pytest.mark.parametrize('metric_type', ct.dense_metrics)

View File

@ -141,6 +141,7 @@ class TestSearchString(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"pk_name": default_int64_field_name,
"limit": 1,
"_async": _async})
if _async:
@ -177,6 +178,7 @@ class TestSearchString(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"pk_name": default_int64_field_name,
"limit": 1,
"_async": _async})
if _async:
@ -216,6 +218,7 @@ class TestSearchString(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"pk_name": ct.default_string_field_name,
"limit": default_limit,
"_async": _async})
@ -259,6 +262,7 @@ class TestSearchString(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": default_limit,
"pk_name": ct.default_string_field_name,
"_async": _async})
@pytest.mark.tags(CaseLabel.L2)
@ -291,6 +295,7 @@ class TestSearchString(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"pk_name": default_int64_field_name,
"limit": default_limit,
"_async": _async})
@ -363,6 +368,7 @@ class TestSearchString(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"pk_name": default_int64_field_name,
"limit": min(nb, len(filter_ids)),
"_async": _async})
if _async:
@ -404,6 +410,7 @@ class TestSearchString(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": 2,
"pk_name": ct.default_string_field_name,
"_async": _async})
@pytest.mark.tags(CaseLabel.L2)
@ -436,6 +443,7 @@ class TestSearchString(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": 2,
"pk_name": default_int64_field_name,
"_async": _async})
@pytest.mark.tags(CaseLabel.L2)
@ -472,6 +480,7 @@ class TestSearchString(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"pk_name": default_int64_field_name,
"limit": default_limit,
"_async": _async})
@ -512,6 +521,7 @@ class TestSearchString(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": 1,
"pk_name": default_int64_field_name,
"_async": _async}
)
@ -552,6 +562,7 @@ class TestSearchString(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": 1,
"pk_name": default_int64_field_name,
"_async": _async}
)
@ -597,6 +608,7 @@ class TestSearchString(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": default_limit,
"pk_name": ct.default_string_field_name,
"_async": _async})
@pytest.mark.tags(CaseLabel.L2)
@ -683,6 +695,7 @@ class TestSearchString(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
"pk_name": default_int64_field_name,
"limit": default_limit,
"_async": _async})
@ -722,6 +735,7 @@ class TestSearchString(TestcaseBase):
check_items={"nq": default_nq,
"ids": insert_ids,
"limit": 1,
"pk_name": default_int64_field_name,
"_async": _async})
if _async:
res.done()

File diff suppressed because it is too large Load Diff

View File

@ -114,7 +114,6 @@ class TestSparseSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"limit": default_limit,
"original_entities": [data],
"output_fields": [ct.default_sparse_vec_field_name]})
expr = "int64 < 100 "
collection_w.search(data[-1][0:default_nq], ct.default_sparse_vec_field_name,
@ -123,7 +122,6 @@ class TestSparseSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"limit": default_limit,
"original_entities": [data],
"output_fields": [ct.default_sparse_vec_field_name]})
@pytest.mark.tags(CaseLabel.L2)
@ -193,7 +191,6 @@ class TestSparseSearch(TestcaseBase):
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"limit": default_limit,
"original_entities": [all_data],
"output_fields": [ct.default_sparse_vec_field_name]})
expr_id_list = [0, 1, 10, 100]
term_expr = f'{ct.default_int64_field_name} in {expr_id_list}'

View File

@ -31,7 +31,6 @@ pytest-random-order
pymilvus==2.6.0rc119
pymilvus[bulk_writer]==2.6.0rc119
# for customize config test
python-benedict==0.24.3
timeout-decorator==0.5.0

View File

@ -175,6 +175,7 @@ class TestAsyncMilvusClientCollectionValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"pk_name": default_primary_key_field_name,
"limit": default_limit})
tasks.append(search_task)
# 5. query
@ -182,7 +183,7 @@ class TestAsyncMilvusClientCollectionValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_query_results,
check_items={"exp_res": rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
tasks.append(query_task)
res = await asyncio.gather(*tasks)
@ -204,13 +205,14 @@ class TestAsyncMilvusClientCollectionValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
# 11. query
await async_client.query(collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={"exp_res": rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
# 12. drop action
if self.has_partition(client, collection_name, partition_name)[0]:

View File

@ -62,7 +62,8 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": ct.default_nq,
"limit": ct.default_limit})
"limit": ct.default_limit,
"pk_name": default_pk_name})
tasks.append(default_search_task)
# search with filter & search_params
@ -73,7 +74,8 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": ct.default_nq,
"limit": ct.default_limit})
"limit": ct.default_limit,
"pk_name": default_pk_name})
tasks.append(filter_params_search_task)
# search output fields
@ -82,7 +84,8 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": ct.default_nq,
"limit": ct.default_limit})
"limit": ct.default_limit,
"pk_name": default_pk_name})
tasks.append(output_search_task)
# query with filter and default output "*"
@ -92,7 +95,7 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
output_fields=[default_pk_name],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": exp_query_res,
"primary_field": default_pk_name})
"pk_name": default_pk_name})
tasks.append(filter_query_task)
# query with ids and output all fields
ids_query_task = self.async_milvus_client_wrap.query(c_name,
@ -101,7 +104,7 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
check_task=CheckTasks.check_query_results,
check_items={"exp_res": rows[:ct.default_limit],
"with_vec": True,
"primary_field": default_pk_name})
"pk_name": default_pk_name})
tasks.append(ids_query_task)
# get with ids
get_task = self.async_milvus_client_wrap.get(c_name,
@ -109,7 +112,7 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
output_fields=[default_pk_name, default_vector_name],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": rows[:2], "with_vec": True,
"primary_field": default_pk_name})
"pk_name": default_pk_name})
tasks.append(get_task)
await asyncio.gather(*tasks)
@ -158,7 +161,8 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": ct.default_nq,
"limit": ct.default_limit})
"limit": ct.default_limit,
"pk_name": default_pk_name})
tasks.append(default_search_task)
# search with filter & search_params
@ -170,7 +174,8 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": ct.default_nq,
"limit": ct.default_limit})
"limit": ct.default_limit,
"pk_name": default_pk_name})
tasks.append(filter_params_search_task)
# search output fields
@ -180,7 +185,8 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": ct.default_nq,
"limit": ct.default_limit})
"limit": ct.default_limit,
"pk_name": default_pk_name})
tasks.append(output_search_task)
# query with filter and default output "*"
@ -191,7 +197,7 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
partition_names=[p_name],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": exp_query_res,
"primary_field": default_pk_name})
"pk_name": default_pk_name})
tasks.append(filter_query_task)
# query with ids and output all fields
ids_query_task = self.async_milvus_client_wrap.query(c_name,
@ -201,7 +207,7 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
check_task=CheckTasks.check_query_results,
check_items={"exp_res": rows[:ct.default_limit],
"with_vec": True,
"primary_field": default_pk_name})
"pk_name": default_pk_name})
tasks.append(ids_query_task)
# get with ids
get_task = self.async_milvus_client_wrap.get(c_name,
@ -209,7 +215,7 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
output_fields=[default_pk_name, default_vector_name],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": rows[:2], "with_vec": True,
"primary_field": default_pk_name})
"pk_name": default_pk_name})
tasks.append(get_task)
await asyncio.gather(*tasks)
@ -283,7 +289,8 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": ct.default_nq,
"limit": ct.default_limit})
"limit": ct.default_limit,
"pk_name": default_pk_name})
tasks.append(default_search_task)
# hybrid_search
@ -309,7 +316,8 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
check_items={
"enable_milvus_client_api": True,
"nq": ct.default_nq,
"limit": 5})
"limit": 5,
"pk_name": default_pk_name})
tasks.append(filter_params_search_task)
# get with ids
@ -416,7 +424,8 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": ct.default_nq,
"limit": ct.default_limit})
"limit": ct.default_limit,
"pk_name": default_pk_name})
tasks.append(default_search_task)
# query with filter and default output "*"
@ -426,7 +435,7 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
output_fields=[default_pk_name],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": exp_query_res,
"primary_field": default_pk_name})
"pk_name": default_pk_name})
tasks.append(filter_query_task)
# get with ids
@ -435,7 +444,7 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
output_fields=[default_pk_name, default_vector_name],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": rows[:2], "with_vec": True,
"primary_field": default_pk_name})
"pk_name": default_pk_name})
tasks.append(get_task)
await asyncio.gather(*tasks)
@ -495,7 +504,8 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": ct.default_nq,
"limit": ct.default_limit})
"limit": ct.default_limit,
"pk_name": default_pk_name})
tasks.append(default_search_task)
# query with filter and default output "*"
@ -505,6 +515,6 @@ class TestAsyncMilvusClient(TestMilvusClientV2Base):
output_fields=[default_pk_name],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": exp_query_res,
"primary_field": default_pk_name})
"pk_name": default_pk_name})
tasks.append(filter_query_task)
await asyncio.gather(*tasks)

View File

@ -323,7 +323,8 @@ class TestAsyncMilvusClientIndexValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
tasks.append(search_task)
# 6. query
query_task = self.async_milvus_client_wrap. \
@ -331,7 +332,7 @@ class TestAsyncMilvusClientIndexValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_query_results,
check_items={"exp_res": rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
tasks.append(query_task)
res = await asyncio.gather(*tasks)

View File

@ -669,7 +669,8 @@ class TestAsyncMilvusClientPartitionValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
tasks.append(search_task)
# 5. query
query_task = async_client.query(collection_name, filter=default_search_exp,
@ -677,7 +678,7 @@ class TestAsyncMilvusClientPartitionValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_query_results,
check_items={"exp_res": rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
tasks.append(query_task)
res = await asyncio.gather(*tasks)
@ -744,7 +745,8 @@ class TestAsyncMilvusClientPartitionValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
tasks.append(search_task)
# search multi partition
search_task_multi = async_client.search(collection_name, vectors_to_search,
@ -752,7 +754,8 @@ class TestAsyncMilvusClientPartitionValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
tasks.append(search_task_multi)
# query single partition
query_task = async_client.query(collection_name, filter=default_search_exp,
@ -760,7 +763,7 @@ class TestAsyncMilvusClientPartitionValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_query_results,
check_items={"exp_res": rows_1,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
tasks.append(query_task)
# query multi partition
query_task_multi = async_client.query(collection_name, filter=default_search_exp,
@ -768,7 +771,7 @@ class TestAsyncMilvusClientPartitionValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_query_results,
check_items={"exp_res": rows_1 + rows_2,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
tasks.append(query_task_multi)
res = await asyncio.gather(*tasks)
# 5. release partitions, search and query
@ -789,13 +792,14 @@ class TestAsyncMilvusClientPartitionValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
await async_client.query(collection_name, filter=default_search_exp,
partition_names=[partition_name_2],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": rows_2,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
# 6. load partitions, search and query
tasks_after_load = []
@ -804,13 +808,14 @@ class TestAsyncMilvusClientPartitionValid(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
tasks_after_load.append(search_task)
query_task = async_client.query(collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={"exp_res": rows_default + rows_1 + rows_2,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
tasks_after_load.append(query_task)
res = await asyncio.gather(*tasks_after_load)

View File

@ -1294,7 +1294,7 @@ class TestCollectionDataframe(TestcaseBase):
df = pd.DataFrame(data=mix_data, columns=list("ABC"))
error = {ct.err_code: 1,
ct.err_msg: "The Input data type is inconsistent with defined schema, "
"{C} field should be a float_vector, but got a {<class 'list'>} instead."}
"{C} field should be a FLOAT_VECTOR, but got a {<class 'list'>} instead."}
self.collection_wrap.construct_from_dataframe(c_name, df, primary_field='A', check_task=CheckTasks.err_res,
check_items=error)
@ -4725,7 +4725,7 @@ class TestCollectionDefaultValueValid(TestcaseBase):
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="issue 36457")
# @pytest.mark.skip(reason="issue 36457")
def test_create_collection_default_value_twice(self):
"""
target: test create collection with set default value twice
@ -4740,7 +4740,7 @@ class TestCollectionDefaultValueValid(TestcaseBase):
int_fields.append(cf.gen_float_field(default_value=numpy.float32(10.0)))
int_fields.append(cf.gen_float_vec_field())
schema = cf.gen_collection_schema(fields=int_fields)
self.collection_wrap.init_collection(c_name, schema=schema)
c=self.collection_wrap.init_collection(c_name, schema=schema)[0]
self.collection_wrap.init_collection(c_name, schema=schema)
@pytest.mark.tags(CaseLabel.L1)

View File

@ -1051,8 +1051,9 @@ class TestCompactionOperation(TestcaseBase):
collection_w.query(expr, check_task=CheckTasks.check_query_empty)
expr_1 = f'{ct.default_int64_field_name} in {[1]}'
collection_w.query(expr_1, check_task=CheckTasks.check_query_results, check_items={
'exp_res': [{'int64': 1}]})
collection_w.query(expr_1, check_task=CheckTasks.check_query_results,
check_items={'exp_res': [{'int64': 1}],
"pk_name": collection_w.primary_field.name,})
@pytest.mark.tags(CaseLabel.L1)
def test_compact_cross_shards(self):

View File

@ -892,6 +892,7 @@ class TestDatabaseOtherApi(TestcaseBase):
partition_names=[partition_name],
check_task=CheckTasks.check_query_iterator,
check_items={"count": 1000,
"pk_name": self.database_wrap.primary_field.name,
"batch_size": ct.default_limit * 10})
def prepare_data_for_db_search(self):

View File

@ -214,7 +214,8 @@ class TestDeleteParams(TestcaseBase):
expr = f'{ct.default_int64_field_name} in {[tmp_nb]}'
collection_w.delete(expr=expr)
collection_w.query(tmp_expr, check_task=CheckTasks.check_query_results,
check_items={exp_res: query_res_tmp_expr})
check_items={'exp_res': query_res_tmp_expr,
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_delete_part_not_existed_values(self):
@ -284,7 +285,9 @@ class TestDeleteParams(TestcaseBase):
collection_w.query(tmp_expr, check_task=CheckTasks.check_query_empty, partition_names=[partition_w.name])
res = df.iloc[1:2, :1].to_dict('records')
collection_w.query(f'{ct.default_int64_field_name} in [1]',
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={'exp_res': res,
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_delete_default_partition(self):
@ -414,10 +417,8 @@ class TestDeleteOperation(TestcaseBase):
search_res, _ = collection_w.search([df[ct.default_float_vec_field_name][0]],
ct.default_float_vec_field_name,
ct.default_search_params, ct.default_limit)
log.debug(search_res[0].ids)
# assert search results not contains deleted ids
inter = set(insert_res.primary_keys[:ct.default_nb // 2]).intersection(set(search_res[0].ids))
log.debug(inter)
assert len(inter) == 0
@pytest.mark.tags(CaseLabel.L1)
@ -461,7 +462,9 @@ class TestDeleteOperation(TestcaseBase):
res = df_same.iloc[-2:, [0, 1, -1]].to_dict('records')
collection_w.query(expr=f'{ct.default_int64_field_name} >= {tmp_nb-1}',
output_fields=[ct.default_float_vec_field_name, ct.default_float_field_name],
check_task=CheckTasks.check_query_results, check_items={'exp_res': res, 'with_vec': True})
check_task=CheckTasks.check_query_results,
check_items={'exp_res': res, 'with_vec': True,
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_delete_query_delta_logs(self):
@ -497,7 +500,9 @@ class TestDeleteOperation(TestcaseBase):
res = df_same.iloc[:, [0, 1, -1]].to_dict('records')
collection_w.query(expr=f'{ct.default_int64_field_name} < {L0_binlog_num_compaction+2}',
output_fields=[ct.default_float_vec_field_name, ct.default_float_field_name],
check_task=CheckTasks.check_query_results, check_items={'exp_res': res, 'with_vec': True})
check_task=CheckTasks.check_query_results,
check_items={'exp_res': res, 'with_vec': True,
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_delete_search(self):
@ -521,7 +526,6 @@ class TestDeleteOperation(TestcaseBase):
ct.default_float_vec_field_name,
ct.default_search_params, ct.default_limit)
# assert search result is not equal to entity
log.debug(f"Second search result ids: {search_res_2[0].ids}")
inter = set(ids[:ct.default_nb // 2]
).intersection(set(search_res_2[0].ids))
# Using bounded staleness, we could still search the "deleted" entities,
@ -555,7 +559,6 @@ class TestDeleteOperation(TestcaseBase):
ct.default_float_vec_field_name,
ct.default_search_params, ct.default_limit)
# assert search result is not equal to entity
log.debug(f"Second search result ids: {search_res_2[0].ids}")
inter = set(ids[:ct.default_nb // 2]
).intersection(set(search_res_2[0].ids))
# Using bounded staleness, we could still search the "deleted" entities,
@ -651,8 +654,9 @@ class TestDeleteOperation(TestcaseBase):
# delete entities from another partition
expr = f'{ct.default_int64_field_name} in {[0]}'
collection_w.delete(expr, partition_name=ct.default_partition_name)
collection_w.query(expr, check_task=CheckTasks.check_query_results, check_items={
exp_res: query_res_tmp_expr})
collection_w.query(expr, check_task=CheckTasks.check_query_results,
check_items={'exp_res': query_res_tmp_expr,
"pk_name": collection_w.primary_field.name})
# delete entities from own partition
collection_w.delete(expr, partition_name=partition_w.name)
@ -685,7 +689,9 @@ class TestDeleteOperation(TestcaseBase):
# query on partition_w with id 0 and get an result
collection_w.query(tmp_expr, partition_names=[partition_w.name],
check_task=CheckTasks.check_query_results, check_items={exp_res: query_res_tmp_expr})
check_task=CheckTasks.check_query_results,
check_items={'exp_res': query_res_tmp_expr,
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L0)
def test_delete_auto_id_collection(self):
@ -759,7 +765,9 @@ class TestDeleteOperation(TestcaseBase):
res = df_same.iloc[:, [0, 1, -1]].to_dict('records')
collection_w.query(expr=tmp_expr,
output_fields=[ct.default_float_vec_field_name, ct.default_float_field_name],
check_task=CheckTasks.check_query_results, check_items={'exp_res': res, 'with_vec': True})
check_task=CheckTasks.check_query_results,
check_items={'exp_res': res, 'with_vec': True,
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_delete_growing_data_channel_delete(self):
@ -923,14 +931,15 @@ class TestDeleteOperation(TestcaseBase):
df_new = cf.gen_default_dataframe_data(4, start=tmp_nb)
df_new[ct.default_int64_field_name] = [0, 1, 3, 5]
collection_w.insert(df_new)
log.debug(f'to_flush:{to_flush}')
if to_flush:
log.debug(collection_w.num_entities)
# query entity
res = df_new.iloc[:, [0, 1, -1]].to_dict('records')
collection_w.query(del_expr, output_fields=[ct.default_float_vec_field_name, ct.default_float_field_name],
check_task=CheckTasks.check_query_results, check_items={'exp_res': res, 'with_vec': True})
check_task=CheckTasks.check_query_results,
check_items={'exp_res': res, 'with_vec': True,
"pk_name": collection_w.primary_field.name})
search_res, _ = collection_w.search(data=[df_new[ct.default_float_vec_field_name][0]],
anns_field=ct.default_float_vec_field_name,
param=default_search_params, limit=1)
@ -963,7 +972,9 @@ class TestDeleteOperation(TestcaseBase):
res = df.iloc[:1, :1].to_dict('records')
collection_w.search(data=[df[ct.default_float_vec_field_name][0]], anns_field=ct.default_float_vec_field_name,
param=default_search_params, limit=1)
collection_w.query(tmp_expr, check_task=CheckTasks.check_query_results, check_items={'exp_res': res})
collection_w.query(tmp_expr, check_task=CheckTasks.check_query_results,
check_items={'exp_res': res,
"pk_name": collection_w.primary_field.name})
# delete
collection_w.delete(tmp_expr)
@ -979,7 +990,9 @@ class TestDeleteOperation(TestcaseBase):
# re-query
res = df_new.iloc[[0], [0, 1, -1]].to_dict('records')
collection_w.query(tmp_expr, output_fields=[ct.default_float_vec_field_name, ct.default_float_field_name],
check_task=CheckTasks.check_query_results, check_items={'exp_res': res, 'with_vec': True})
check_task=CheckTasks.check_query_results,
check_items={'exp_res': res, 'with_vec': True,
"pk_name": collection_w.primary_field.name})
search_res, _ = collection_w.search(data=[df_new[ct.default_float_vec_field_name][0]],
anns_field=ct.default_float_vec_field_name,
param=default_search_params, limit=1)
@ -1060,7 +1073,9 @@ class TestDeleteOperation(TestcaseBase):
log.debug(collection_w.num_entities)
collection_w.query(tmp_expr, output_fields=[ct.default_float_vec_field_name],
check_task=CheckTasks.check_query_results,
check_items={'exp_res': df_new.iloc[[0], [0, 4]].to_dict('records'), 'with_vec': True})
check_items={'exp_res': df_new.iloc[[0], [0, 4]].to_dict('records'),
'with_vec': True,
"pk_name": collection_w.primary_field.name})
collection_w.delete(tmp_expr)
if to_flush_delete:
@ -1324,11 +1339,9 @@ class TestDeleteString(TestcaseBase):
search_res, _ = collection_w.search([df[ct.default_float_vec_field_name][0]],
ct.default_float_vec_field_name,
ct.default_search_params, ct.default_limit)
log.debug(search_res[0].ids)
# assert search results not contains deleted ids
inter = set(insert_res.primary_keys[:ct.default_nb // 2]).intersection(set(search_res[0].ids))
log.debug(inter)
assert len(inter) == 0
assert len(inter) == 0, "assert no deleted ids in search results"
@pytest.mark.tags(CaseLabel.L1)
def test_delete_query_ids_both_L0_segment_and_WAL_with_string(self):
@ -1374,7 +1387,8 @@ class TestDeleteString(TestcaseBase):
collection_w.query(expr=default_string_expr,
output_fields=[ct.default_float_vec_field_name],
check_task=CheckTasks.check_query_results,
check_items={'exp_res': res, 'with_vec': True, "primary_field": ct.default_string_field_name})
check_items={'exp_res': res, 'with_vec': True,
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_delete_search_with_string(self):
@ -1400,7 +1414,6 @@ class TestDeleteString(TestcaseBase):
ct.default_float_vec_field_name,
ct.default_search_params, ct.default_limit)
# assert search result is not equal to entity
log.debug(f"Second search result ids: {search_res_2[0].ids}")
inter = set(ids[:ct.default_nb // 2]
).intersection(set(search_res_2[0].ids))
# Using bounded staleness, we could still search the "deleted" entities,
@ -1483,7 +1496,9 @@ class TestDeleteString(TestcaseBase):
# query on partition_w with id 0 and get an result
collection_w.query(default_string_expr, partition_names=[partition_w.name],
check_task=CheckTasks.check_query_results, check_items={exp_res: query_tmp_expr_str})
check_task=CheckTasks.check_query_results,
check_items={'exp_res': query_tmp_expr_str,
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_delete_sealed_segment_without_flush_with_string(self):
@ -1519,7 +1534,8 @@ class TestDeleteString(TestcaseBase):
collection_w.query(expr=default_string_expr,
output_fields=[ct.default_float_vec_field_name],
check_task=CheckTasks.check_query_results,
check_items={'exp_res': res, 'with_vec': True, "primary_field": ct.default_string_field_name})
check_items={'exp_res': res, 'with_vec': True,
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_delete_growing_data_channel_delete_with_string(self):
@ -1713,7 +1729,7 @@ class TestDeleteString(TestcaseBase):
collection_w.query(default_string_expr, output_fields=[ct.default_float_vec_field_name],
check_task=CheckTasks.check_query_results,
check_items={'exp_res': df_new.iloc[[0], [2, 4]].to_dict('records'),
'primary_field': ct.default_string_field_name, 'with_vec': True})
'pk_name': collection_w.primary_field.name, 'with_vec': True})
collection_w.delete(default_string_expr)
if to_flush_delete:
@ -1871,7 +1887,9 @@ class TestDeleteString(TestcaseBase):
res = df.iloc[:1, 2:3].to_dict('records')
collection_w.search(data=[df[ct.default_float_vec_field_name][0]], anns_field=ct.default_float_vec_field_name,
param=default_search_params, limit=1)
collection_w.query(default_string_expr, check_task=CheckTasks.check_query_results, check_items={'exp_res': res})
collection_w.query(default_string_expr, check_task=CheckTasks.check_query_results,
check_items={'exp_res': res,
"pk_name": collection_w.primary_field.name})
# delete
collection_w.delete(default_string_expr)
@ -1886,11 +1904,10 @@ class TestDeleteString(TestcaseBase):
# re-query
res = df_new.iloc[[0], [2, 4]].to_dict('records')
log.info(res)
collection_w.query(default_string_expr, output_fields=[ct.default_float_vec_field_name],
check_task=CheckTasks.check_query_results,
check_items={'exp_res': res,
'primary_field': ct.default_string_field_name,
'pk_name': collection_w.primary_field.name,
'with_vec': True})
collection_w.search(data=[df_new[ct.default_float_vec_field_name][0]],
anns_field=ct.default_float_vec_field_name,
@ -2495,7 +2512,6 @@ class TestCollectionSearchNoneAndDefaultData(TestcaseBase):
ct.default_float_vec_field_name,
ct.default_search_params, ct.default_limit)
# assert search result is not equal to entity
log.debug(f"Second search result ids: {search_res_2[0].ids}")
inter = set(ids[:ct.default_nb // 2]
).intersection(set(search_res_2[0].ids))
# Using bounded staleness, we could still search the "deleted" entities,

View File

@ -162,13 +162,14 @@ class TestHighLevelApi(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
# 4. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@ -203,7 +204,8 @@ class TestHighLevelApi(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip(reason="issue 25110")
@ -235,13 +237,14 @@ class TestHighLevelApi(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
# 4. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@ -274,7 +277,8 @@ class TestHighLevelApi(TestMilvusClientV2Base):
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"limit": default_limit})
"limit": default_limit,
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@ -312,11 +316,12 @@ class TestHighLevelApi(TestMilvusClientV2Base):
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": limit})
"limit": limit,
"pk_name": default_primary_key_field_name})
# 6. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[delete_num:],
"with_vec": True,
"primary_field": default_primary_key_field_name})
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)

View File

@ -2173,9 +2173,10 @@ class TestUpsertValid(TestcaseBase):
collection_w.load()
for i in range(5):
collection_w.upsert(data=data)
collection_w.query(expr=f'{ct.default_int64_field_name} >= 0', output_fields=[ct.default_count_output]
, check_task=CheckTasks.check_query_results,
check_items={"exp_res": [{"count(*)": ct.default_nb}]})
collection_w.query(expr=f'{ct.default_int64_field_name} >= 0',
output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": [{"count(*)": ct.default_nb}]})
class TestUpsertInvalid(TestcaseBase):

View File

@ -898,7 +898,8 @@ class TestInvertedIndexDQLExpr(TestCaseClassBase):
expr_count = len([i for i in self.insert_data.get(expr_field, []) if len(i) == length])
# query count(*)
self.collection_wrap.query(expr=expr, output_fields=['count(*)'], check_task=CheckTasks.check_query_results,
self.collection_wrap.query(expr=expr, output_fields=['count(*)'],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": [{"count(*)": expr_count}]})
@ -1223,7 +1224,8 @@ class TestBitmapIndexDQLExpr(TestCaseClassBase):
expr_count = len([i for i in self.insert_data.get(expr_field, []) if len(i) == length])
# query count(*)
self.collection_wrap.query(expr=expr, output_fields=['count(*)'], check_task=CheckTasks.check_query_results,
self.collection_wrap.query(expr=expr, output_fields=['count(*)'],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": [{"count(*)": expr_count}]})
@pytest.mark.tags(CaseLabel.L1)
@ -1239,7 +1241,8 @@ class TestBitmapIndexDQLExpr(TestCaseClassBase):
1. query response equal to insert nb
"""
# query count(*)
self.collection_wrap.query(expr='', output_fields=['count(*)'], check_task=CheckTasks.check_query_results,
self.collection_wrap.query(expr='', output_fields=['count(*)'],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": [{"count(*)": self.nb}]})
@pytest.mark.tags(CaseLabel.L1)
@ -1637,7 +1640,8 @@ class TestBitmapIndexOffsetCache(TestCaseClassBase):
expr_count = len([i for i in self.insert_data.get(expr_field, []) if len(i) == length])
# query count(*)
self.collection_wrap.query(expr=expr, output_fields=['count(*)'], check_task=CheckTasks.check_query_results,
self.collection_wrap.query(expr=expr, output_fields=['count(*)'],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": [{"count(*)": expr_count}]})
@pytest.mark.tags(CaseLabel.L1)
@ -1653,7 +1657,8 @@ class TestBitmapIndexOffsetCache(TestCaseClassBase):
1. query response equal to insert nb
"""
# query count(*)
self.collection_wrap.query(expr='', output_fields=['count(*)'], check_task=CheckTasks.check_query_results,
self.collection_wrap.query(expr='', output_fields=['count(*)'],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": [{"count(*)": self.nb}]})
@pytest.mark.tags(CaseLabel.L1)
@ -1908,7 +1913,8 @@ class TestBitmapIndexMmap(TestCaseClassBase):
1. query response equal to insert nb
"""
# query count(*)
self.collection_wrap.query(expr='', output_fields=['count(*)'], check_task=CheckTasks.check_query_results,
self.collection_wrap.query(expr='', output_fields=['count(*)'],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": [{"count(*)": self.nb}]})
@pytest.mark.tags(CaseLabel.L1)
@ -2132,8 +2138,10 @@ class TestMixScenes(TestcaseBase):
# query before upsert
expected_res = [{k: v[10] for k, v in insert_data.items() if k != DataType.FLOAT_VECTOR.name}]
self.collection_wrap.query(expr=expr, output_fields=scalar_fields, check_task=CheckTasks.check_query_results,
check_items={"exp_res": expected_res, "primary_field": primary_field})
self.collection_wrap.query(expr=expr, output_fields=scalar_fields,
check_task=CheckTasks.check_query_results,
check_items={"exp_res": expected_res,
"pk_name": primary_field})
# upsert int64_pk = 10
upsert_data = cf.gen_field_values(self.collection_wrap.schema, nb=1,
@ -2141,14 +2149,18 @@ class TestMixScenes(TestcaseBase):
self.collection_wrap.upsert(data=list(upsert_data.values()))
# re-query
expected_upsert_res = [{k: v[0] for k, v in upsert_data.items() if k != DataType.FLOAT_VECTOR.name}]
self.collection_wrap.query(expr=expr, output_fields=scalar_fields, check_task=CheckTasks.check_query_results,
check_items={"exp_res": expected_upsert_res, "primary_field": primary_field})
self.collection_wrap.query(expr=expr, output_fields=scalar_fields,
check_task=CheckTasks.check_query_results,
check_items={"exp_res": expected_upsert_res,
"pk_name": primary_field})
# delete int64_pk = 10
self.collection_wrap.delete(expr=expr)
# re-query
self.collection_wrap.query(expr=expr, output_fields=scalar_fields, check_task=CheckTasks.check_query_results,
check_items={"exp_res": []})
self.collection_wrap.query(expr=expr, output_fields=scalar_fields,
check_task=CheckTasks.check_query_results,
check_items={"exp_res": [],
"pk_name": primary_field})
@pytest.mark.tags(CaseLabel.L2)
def test_bitmap_offset_cache_and_mmap(self, request):
@ -2207,8 +2219,10 @@ class TestMixScenes(TestcaseBase):
self.collection_wrap.load()
# query before upsert
self.collection_wrap.query(expr=expr, output_fields=scalar_fields, check_task=CheckTasks.check_query_results,
check_items={"exp_res": []})
self.collection_wrap.query(expr=expr, output_fields=scalar_fields,
check_task=CheckTasks.check_query_results,
check_items={"exp_res": [],
"pk_name": primary_field})
# upsert int64_pk = 33333
upsert_data = cf.gen_field_values(self.collection_wrap.schema, nb=1,
@ -2216,14 +2230,18 @@ class TestMixScenes(TestcaseBase):
self.collection_wrap.upsert(data=list(upsert_data.values()))
# re-query
expected_upsert_res = [{k: v[0] for k, v in upsert_data.items() if k != DataType.FLOAT_VECTOR.name}]
self.collection_wrap.query(expr=expr, output_fields=scalar_fields, check_task=CheckTasks.check_query_results,
check_items={"exp_res": expected_upsert_res, "primary_field": primary_field})
self.collection_wrap.query(expr=expr, output_fields=scalar_fields,
check_task=CheckTasks.check_query_results,
check_items={"exp_res": expected_upsert_res,
"pk_name": primary_field})
# delete int64_pk = 33333
self.collection_wrap.delete(expr=expr)
# re-query
self.collection_wrap.query(expr=expr, output_fields=scalar_fields, check_task=CheckTasks.check_query_results,
check_items={"exp_res": []})
self.collection_wrap.query(expr=expr, output_fields=scalar_fields,
check_task=CheckTasks.check_query_results,
check_items={"exp_res": [],
"pk_name": primary_field})
# search
expr_left, expr_right = Expr.GT(Expr.SUB('INT64', 37).subset, 13).value, Expr.LIKE('VARCHAR', '%a').value

View File

@ -397,16 +397,20 @@ class TestPartitionParams(TestcaseBase):
collection_w.create_index(ct.default_float_vec_field_name, ct.default_index)
partition_w.load(replica_number=1)
collection_w.query(expr=f"{ct.default_int64_field_name} in [0]", check_task=CheckTasks.check_query_results,
check_items={'exp_res': [{'int64': 0}]})
collection_w.query(expr=f"{ct.default_int64_field_name} in [0]",
check_task=CheckTasks.check_query_results,
check_items={'exp_res': [{'int64': 0}],
"pk_name": collection_w.primary_field.name})
error = {ct.err_code: 1100, ct.err_msg: "can't change the replica number for loaded partitions: "
"invalid parameter[expected=1][actual=2]"}
partition_w.load(replica_number=2, check_task=CheckTasks.err_res, check_items=error)
partition_w.release()
partition_w.load(replica_number=2)
collection_w.query(expr=f"{ct.default_int64_field_name} in [0]", check_task=CheckTasks.check_query_results,
check_items={'exp_res': [{'int64': 0}]})
collection_w.query(expr=f"{ct.default_int64_field_name} in [0]",
check_task=CheckTasks.check_query_results,
check_items={'exp_res': [{'int64': 0}],
"pk_name": collection_w.primary_field.name})
two_replicas, _ = collection_w.get_replicas()
assert len(two_replicas.groups) == 2

View File

@ -24,7 +24,8 @@ class TestPartitionKeyIsolation(TestcaseBase):
def test_par_key_isolation_with_valid_expr(self):
# create
self._connect()
collection_name = cf.gen_unique_str(prefix)
collection_name = cf.gen_collection_name_by_testcase_name()
dim = 128
partition_key = "scalar_6"
enable_isolation = "true"
if collection_name in list_collections():
@ -42,7 +43,7 @@ class TestPartitionKeyIsolation(TestcaseBase):
is_partition_key=bool(partition_key == "scalar_12")),
FieldSchema(name="scalar_5_linear", dtype=DataType.VARCHAR, max_length=1000,
is_partition_key=bool(partition_key == "scalar_5_linear")),
FieldSchema(name="emb", dtype=DataType.FLOAT_VECTOR, dim=768)
FieldSchema(name="emb", dtype=DataType.FLOAT_VECTOR, dim=dim)
]
schema = CollectionSchema(fields=fields, description="test collection", enable_dynamic_field=True,
num_partitions=1)
@ -72,12 +73,14 @@ class TestPartitionKeyIsolation(TestcaseBase):
"scalar_9": [str(i % 9) for i in range(start_idx, end_idx)],
"scalar_12": [str(i % 12) for i in range(start_idx, end_idx)],
"scalar_5_linear": [str(i % 5) for i in range(start_idx, end_idx)],
"emb": [[random.random() for _ in range(768)] for _ in range(batch_size)]
"emb": [[random.random() for _ in range(dim)] for _ in range(batch_size)]
}
df = pd.DataFrame(data)
all_data.append(df)
log.info(f"generate test data {batch_size} cost time {time.time() - t0}")
collection.insert(df)
num = collection.num_entities
log.info(f"collection {collection_name} loaded, num_entities: {num}")
all_df = pd.concat(all_data)
collection.compact()
collection.wait_for_compaction_completed()
@ -98,8 +101,6 @@ class TestPartitionKeyIsolation(TestcaseBase):
t0 = time.time()
collection.load()
log.info(f"load collection cost time {time.time() - t0}")
num = collection.num_entities
log.info(f"collection {collection_name} loaded, num_entities: {num}")
valid_expressions = [
"scalar_6 == '1' and scalar_12 == '1'",
@ -111,17 +112,15 @@ class TestPartitionKeyIsolation(TestcaseBase):
]
for expr in valid_expressions:
res = collection.search(
data=[[random.random() for _ in range(768)]],
data=[[random.random() for _ in range(dim)]],
anns_field="emb",
expr=expr,
param={"metric_type": "L2", "params": {"nprobe": 16}},
param={"metric_type": "L2", "params": {}},
limit=10000,
output_fields=["scalar_3", "scalar_6", "scalar_12"],
consistency_level="Strong"
)
log.info(f"search res {res}")
true_res = all_df.query(expr)
log.info(f"true res {true_res}")
assert len(res[0]) == len(true_res)
def test_par_key_isolation_with_unsupported_expr(self):

View File

@ -119,7 +119,9 @@ class TestQueryParams(TestcaseBase):
res = vectors[0].iloc[0:pos, :1].to_dict('records')
term_expr = f'{ct.default_int64_field_name} in {int_values[:pos]}'
collection_w.query(term_expr, check_task=CheckTasks.check_query_results, check_items={exp_res: res})
collection_w.query(term_expr,
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_no_collection(self):
@ -181,12 +183,14 @@ class TestQueryParams(TestcaseBase):
term_expr_1 = f'{ct.default_int64_field_name} in {ids[:pos]}'
for i in range(5):
res[i][ct.default_int64_field_name] = ids[i]
self.collection_wrap.query(term_expr_1, check_task=CheckTasks.check_query_results, check_items={exp_res: res})
self.collection_wrap.query(term_expr_1,
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": self.collection_wrap.primary_field.name})
# query with part primary keys
term_expr_2 = f'{ct.default_int64_field_name} in {[ids[0], 0]}'
self.collection_wrap.query(term_expr_2, check_task=CheckTasks.check_query_results,
check_items={exp_res: res[:1]})
check_items={exp_res: res[:1], "pk_name": self.collection_wrap.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("dup_times", [1, 2, 3])
@ -276,7 +280,9 @@ class TestQueryParams(TestcaseBase):
"""
collection_w, vectors = self.init_collection_general(prefix, insert_data=True)[0:2]
res = vectors[0].iloc[:2, :1].to_dict('records')
collection_w.query(default_term_expr, check_task=CheckTasks.check_query_results, check_items={exp_res: res})
collection_w.query(default_term_expr,
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_query_expr_not_existed_field(self):
@ -331,7 +337,8 @@ class TestQueryParams(TestcaseBase):
log.info(res)
self.collection_wrap.query(term_expr, output_fields=["*"],
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "with_vec": True})
check_items={exp_res: res, "with_vec": True,
"pk_name": self.collection_wrap.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_query_expr_by_bool_field(self):
@ -422,7 +429,8 @@ class TestQueryParams(TestcaseBase):
self.collection_wrap.create_index(ct.default_float_vec_field_name, index_params=ct.default_flat_index)
self.collection_wrap.load()
self.collection_wrap.query(term_expr, output_fields=["float", "int64", "int8", "varchar"],
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": self.collection_wrap.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_with_expression(self, enable_dynamic_field):
@ -503,7 +511,8 @@ class TestQueryParams(TestcaseBase):
term_expr = f'{field} not in {values[pos:]}'
res = df.iloc[:pos, :3].to_dict('records')
self.collection_wrap.query(term_expr, output_fields=["float", "int64", "varchar"],
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": self.collection_wrap.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("pos", [0, ct.default_nb])
@ -523,7 +532,9 @@ class TestQueryParams(TestcaseBase):
int64_values = df[ct.default_int64_field_name].tolist()
term_expr = f'{ct.default_int64_field_name} not in {int64_values[pos:]}'
res = df.iloc[:pos, :1].to_dict('records')
self.collection_wrap.query(term_expr, check_task=CheckTasks.check_query_results, check_items={exp_res: res})
self.collection_wrap.query(term_expr,
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": self.collection_wrap.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_expr_random_values(self):
@ -545,7 +556,9 @@ class TestQueryParams(TestcaseBase):
random_values = [0, 2, 4, 3]
term_expr = f'{ct.default_int64_field_name} in {random_values}'
res = df.iloc[random_values, :1].to_dict('records')
self.collection_wrap.query(term_expr, check_task=CheckTasks.check_query_results, check_items={exp_res: res})
self.collection_wrap.query(term_expr,
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": self.collection_wrap.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_query_expr_not_in_random(self):
@ -568,7 +581,9 @@ class TestQueryParams(TestcaseBase):
random.shuffle(random_values)
term_expr = f'{ct.default_int64_field_name} not in {random_values}'
res = df.iloc[:10, :1].to_dict('records')
self.collection_wrap.query(term_expr, check_task=CheckTasks.check_query_results, check_items={exp_res: res})
self.collection_wrap.query(term_expr,
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": self.collection_wrap.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_query_expr_non_array_term(self):
@ -1202,7 +1217,9 @@ class TestQueryParams(TestcaseBase):
res.append({ct.default_int64_field_name: ids})
# 2. query with limit
collection_w.query("", limit=limit, check_task=CheckTasks.check_query_results, check_items={exp_res: res})
collection_w.query("", limit=limit,
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_query_expr_empty_pk_string(self):
@ -1221,12 +1238,14 @@ class TestQueryParams(TestcaseBase):
# 2. query with limit
collection_w.query("", limit=ct.default_limit,
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
# 2. query with limit + offset
res = res[5:]
collection_w.query("", limit=5, offset=5,
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("offset", [100, 1000])
@ -1246,7 +1265,8 @@ class TestQueryParams(TestcaseBase):
# 2. query with limit and offset
collection_w.query("", limit=limit, offset=offset,
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("offset", [100, 1000])
@ -1275,7 +1295,8 @@ class TestQueryParams(TestcaseBase):
res.append({ct.default_int64_field_name: ids, ct.default_string_field_name: str(ids)})
collection_w.query("", limit=limit, output_fields=[ct.default_string_field_name],
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
# 4. query with pagination
exp_ids, res = sorted(unordered_ids)[:limit + offset][offset:], []
@ -1283,7 +1304,8 @@ class TestQueryParams(TestcaseBase):
res.append({ct.default_int64_field_name: ids, ct.default_string_field_name: str(ids)})
collection_w.query("", limit=limit, offset=offset, output_fields=[ct.default_string_field_name],
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L0)
def test_query_expr_with_limit_offset_out_of_range(self):
@ -1400,7 +1422,8 @@ class TestQueryParams(TestcaseBase):
collection_w.load()
actual_res, _ = collection_w.query(default_term_expr, output_fields=all_fields,
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "with_vec": True})
check_items={exp_res: res, "with_vec": True,
"pk_name": collection_w.primary_field.name})
assert set(actual_res[0].keys()) == set(all_fields)
@pytest.mark.tags(CaseLabel.L2)
@ -1422,7 +1445,8 @@ class TestQueryParams(TestcaseBase):
for output_fields in fields:
collection_w.query(default_term_expr, output_fields=output_fields,
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "with_vec": True})
check_items={exp_res: res, "with_vec": True,
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("wildcard_output_fields", [["*"], ["*", default_float_field_name],
@ -1471,7 +1495,7 @@ class TestQueryParams(TestcaseBase):
collection_w.load()
collection_w.query(default_term_expr, output_fields=output_fields,
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "with_vec": True})
check_items={exp_res: res, "with_vec": True, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="https://github.com/milvus-io/milvus/issues/12680")
@ -1500,12 +1524,12 @@ class TestQueryParams(TestcaseBase):
collection_w.load()
collection_w.query(default_term_expr, output_fields=output_fields,
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "with_vec": True})
check_items={exp_res: res, "with_vec": True, "pk_name": collection_w.primary_field.name})
# query with wildcard %
collection_w.query(default_term_expr, output_fields=["*"],
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "with_vec": True})
check_items={exp_res: res, "with_vec": True, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_query_output_binary_vec_field(self):
@ -1578,7 +1602,7 @@ class TestQueryParams(TestcaseBase):
res3 = df.iloc[:2].to_dict('records')
collection_w.query(default_term_expr, output_fields=["*"],
check_task=CheckTasks.check_query_results,
check_items={exp_res: res3, "with_vec": True})
check_items={exp_res: res3, "with_vec": True, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="issue 24637")
@ -1598,7 +1622,7 @@ class TestQueryParams(TestcaseBase):
collection_w.load()
collection_w.query(default_term_expr, output_fields=["*", ct.default_float_vec_field_name],
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "with_vec": True})
check_items={exp_res: res, "with_vec": True, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("output_fields", [["*%"], ["**"], ["*", "@"]])
@ -1633,7 +1657,8 @@ class TestQueryParams(TestcaseBase):
partition_w.load()
res = df.iloc[:2, :1].to_dict('records')
collection_w.query(default_term_expr, partition_names=[partition_w.name],
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_query_partition_without_loading(self):
@ -1661,7 +1686,8 @@ class TestQueryParams(TestcaseBase):
collection_w, vectors = self.init_collection_general(prefix, insert_data=True)[0:2]
res = vectors[0].iloc[:2, :1].to_dict('records')
collection_w.query(default_term_expr, partition_names=[ct.default_partition_name],
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_empty_partition_names(self):
@ -1677,8 +1703,9 @@ class TestQueryParams(TestcaseBase):
# query from empty partition_names
term_expr = f'{ct.default_int64_field_name} in [0, {half}, {ct.default_nb}-1]'
res = [{'int64': 0}, {'int64': half}, {'int64': ct.default_nb - 1}]
collection_w.query(term_expr, partition_names=[], check_task=CheckTasks.check_query_results,
check_items={exp_res: res})
collection_w.query(term_expr, partition_names=[],
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_query_empty_partition(self):
@ -1797,7 +1824,7 @@ class TestQueryParams(TestcaseBase):
query_params = {"offset": offset, "limit": 10}
query_res = collection_w.query(term_expr, params=query_params,
check_task=CheckTasks.check_query_results,
check_items={exp_res: res})[0]
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})[0]
key_res = [item[key] for item in query_res for key in item]
assert key_res == int_values[offset: pos + offset]
@ -1819,7 +1846,7 @@ class TestQueryParams(TestcaseBase):
query_params = {"offset": offset, "limit": 10}
query_res = collection_w.query(term_expr, params=query_params,
check_task=CheckTasks.check_query_results,
check_items={exp_res: res})[0]
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})[0]
key_res = [item[key] for item in query_res for key in item]
assert key_res == int_values[offset: pos + offset]
@ -1875,7 +1902,8 @@ class TestQueryParams(TestcaseBase):
res = df.iloc[:2, :1].to_dict('records')
query_params = {"offset": offset, "limit": 10}
collection_w.query(default_term_expr, params=query_params, partition_names=[partition_w.name],
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_query_pagination_with_insert_data(self, offset):
@ -1893,7 +1921,8 @@ class TestQueryParams(TestcaseBase):
res = df.iloc[:2, :1].to_dict('records')
query_params = {"offset": offset, "limit": 10}
collection_w.query(default_term_expr, params=query_params,
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_pagination_without_limit(self, offset):
@ -1911,10 +1940,10 @@ class TestQueryParams(TestcaseBase):
query_params = {"offset": offset}
query_res = collection_w.query(term_expr, params=query_params,
check_task=CheckTasks.check_query_results,
check_items={exp_res: res})[0]
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})[0]
res = collection_w.query(term_expr,
check_task=CheckTasks.check_query_results,
check_items={exp_res: res})[0]
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})[0]
assert query_res == res
@pytest.mark.tags(CaseLabel.L2)
@ -2059,12 +2088,14 @@ class TestQueryParams(TestcaseBase):
collection_w.load()
# 2. query with limit
collection_w.query("", limit=ct.default_limit,
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
# 3. query with limit + offset
res = res[5:]
collection_w.query("", limit=5, offset=5,
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_enable_mmap_query_with_expression(self, enable_dynamic_field):
@ -2180,7 +2211,8 @@ class TestQueryParams(TestcaseBase):
expression = 'varchar like "0%"'
output_fields = [default_int_field_name, default_float_field_name, default_string_field_name]
collection_w.query(expression, output_fields=output_fields,
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
class TestQueryOperation(TestcaseBase):
@ -2249,7 +2281,8 @@ class TestQueryOperation(TestcaseBase):
# query the first row of data
check_vec = vectors[0].iloc[:, [0]][0:1].to_dict('records')
collection_w.query(term_expr,
check_task=CheckTasks.check_query_results, check_items={exp_res: check_vec})
check_task=CheckTasks.check_query_results,
check_items={exp_res: check_vec, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("term_expr", [f'{ct.default_int64_field_name} in [0]'])
@ -2267,7 +2300,8 @@ class TestQueryOperation(TestcaseBase):
# query the first row of data
check_vec = vectors[0].iloc[:, [0]][0:1].to_dict('records')
collection_w.query(term_expr,
check_task=CheckTasks.check_query_results, check_items={exp_res: check_vec})
check_task=CheckTasks.check_query_results,
check_items={exp_res: check_vec, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_query_expr_all_term_array(self):
@ -2288,7 +2322,8 @@ class TestQueryOperation(TestcaseBase):
# query all array value
collection_w.query(term_expr,
check_task=CheckTasks.check_query_results, check_items={exp_res: check_vec})
check_task=CheckTasks.check_query_results,
check_items={exp_res: check_vec, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_expr_half_term_array(self):
@ -2339,7 +2374,7 @@ class TestQueryOperation(TestcaseBase):
term_expr = f'{ct.default_int64_field_name} in {[0, 0, 0]}'
res = df.iloc[:, :2].to_dict('records')
collection_w.query(term_expr, output_fields=["*"], check_items=CheckTasks.check_query_results,
check_task={exp_res: res})
check_task={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("with_growing", [True])
@ -2397,7 +2432,8 @@ class TestQueryOperation(TestcaseBase):
term_expr = f'{ct.default_int64_field_name} in {int_values}'
check_vec = vectors[0].iloc[:, [0]][0:len(int_values)].to_dict('records')
collection_w.query(term_expr,
check_task=CheckTasks.check_query_results, check_items={exp_res: check_vec})
check_task=CheckTasks.check_query_results,
check_items={exp_res: check_vec, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_after_search(self):
@ -2426,7 +2462,8 @@ class TestQueryOperation(TestcaseBase):
term_expr = f'{ct.default_int64_field_name} in [0, 1]'
check_vec = vectors[0].iloc[:, [0]][0:2].to_dict('records')
collection_w.query(term_expr,
check_task=CheckTasks.check_query_results, check_items={exp_res: check_vec})
check_task=CheckTasks.check_query_results,
check_items={exp_res: check_vec, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_output_vec_field_after_index(self):
@ -2446,7 +2483,7 @@ class TestQueryOperation(TestcaseBase):
collection_w.load()
collection_w.query(default_term_expr, output_fields=fields,
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "with_vec": True})
check_items={exp_res: res, "with_vec": True, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_output_binary_vec_field_after_index(self):
@ -2525,7 +2562,8 @@ class TestQueryOperation(TestcaseBase):
term_expr = f'{ct.default_int64_field_name} in [{half}]'
# half entity in _default partition rather than partition_w
collection_w.query(term_expr, partition_names=[partition_w.name],
check_task=CheckTasks.check_query_results, check_items={exp_res: []})
check_task=CheckTasks.check_query_results,
check_items={exp_res: [], "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_multi_partitions_multi_results(self):
@ -2584,7 +2622,8 @@ class TestQueryOperation(TestcaseBase):
res = df.iloc[1:2, :1].to_dict('records')
time.sleep(1)
collection_w.query(f'{ct.default_int64_field_name} in [1]',
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip("not support default_value now")
@ -2691,7 +2730,8 @@ class TestQueryString(TestcaseBase):
res = vectors[0].iloc[:2, :3].to_dict('records')
output_fields = [default_float_field_name, default_string_field_name]
collection_w.query(default_string_term_expr, output_fields=output_fields,
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("expression", cf.gen_normal_string_expressions([default_string_field_name]))
@ -2719,7 +2759,8 @@ class TestQueryString(TestcaseBase):
res = vectors[0].iloc[:, 1:3].to_dict('records')
output_fields = [default_float_field_name, default_string_field_name]
collection_w.query(default_mix_expr, output_fields=output_fields,
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("expression", cf.gen_invalid_string_expressions())
@ -2763,7 +2804,8 @@ class TestQueryString(TestcaseBase):
expression = 'varchar like "0%"'
output_fields = [default_int_field_name, default_float_field_name, default_string_field_name]
collection_w.query(expression, output_fields=output_fields,
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_string_expr_with_suffix(self):
@ -2779,7 +2821,8 @@ class TestQueryString(TestcaseBase):
res = filtered_data.iloc[:, :3].to_dict('records')
output_fields = [default_int_field_name, default_float_field_name, default_string_field_name]
collection_w.query(expression, output_fields=output_fields,
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_string_expr_with_inner_match(self):
@ -2795,7 +2838,8 @@ class TestQueryString(TestcaseBase):
res = filtered_data.iloc[:, :3].to_dict('records')
output_fields = [default_int_field_name, default_float_field_name, default_string_field_name]
collection_w.query(expression, output_fields=output_fields,
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_bitmap_alter_offset_cache_param(self):
@ -2955,7 +2999,8 @@ class TestQueryString(TestcaseBase):
expression = 'float > int64'
output_fields = [default_int_field_name, default_float_field_name, default_string_field_name]
collection_w.query(expression, output_fields=output_fields,
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_compare_invalid_fields(self):
@ -3018,7 +3063,7 @@ class TestQueryString(TestcaseBase):
collection_w.query(expression, output_fields=output_fields,
check_task=CheckTasks.check_query_results,
check_items={exp_res: df_dict_list,
"primary_field": default_int_field_name,
"pk_name": collection_w.primary_field.name,
"with_vec": True})
@pytest.mark.tags(CaseLabel.L2)
@ -3098,7 +3143,7 @@ class TestQueryString(TestcaseBase):
check_vec = vectors[0].iloc[:, [0]][0:len(int_values)].to_dict('records')
collection_w.query(term_expr,
check_task=CheckTasks.check_query_results,
check_items={exp_res: check_vec})
check_items={exp_res: check_vec, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_query_with_create_diskann_with_string_pk(self):
@ -3116,7 +3161,8 @@ class TestQueryString(TestcaseBase):
res = vectors[0].iloc[:, 1:3].to_dict('records')
output_fields = [default_float_field_name, default_string_field_name]
collection_w.query(default_mix_expr, output_fields=output_fields,
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_with_scalar_field(self):
@ -3306,7 +3352,8 @@ class TestQueryCount(TestcaseBase):
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: ct.default_nb}]})
check_items={exp_res: [{count: ct.default_nb}],
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("invalid_output_field", ["count", "count(int64)", "count(**)"])
@ -3367,15 +3414,13 @@ class TestQueryCount(TestcaseBase):
# query count
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: tmp_nb}]}
)
check_items={exp_res: [{count: tmp_nb}],"pk_name": collection_w.primary_field.name})
# delete and verify count
collection_w.delete(default_term_expr)
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: 0}]}
)
check_items={exp_res: [{count: 0}], "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_count_multi_partitions(self):
@ -3395,7 +3440,7 @@ class TestQueryCount(TestcaseBase):
for p_name in [p1.name, ct.default_partition_name]:
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output], partition_names=[p_name],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: half}]})
check_items={exp_res: [{count: half}], "pk_name": collection_w.primary_field.name})
# delete entities from _default
delete_expr = f"{ct.default_int64_field_name} in {[i for i in range(half, ct.default_nb)]} "
@ -3403,11 +3448,11 @@ class TestQueryCount(TestcaseBase):
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
partition_names=[ct.default_partition_name],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: 0}]})
check_items={exp_res: [{count: 0}], "pk_name": collection_w.primary_field.name})
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
partition_names=[p1.name, ct.default_partition_name],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: half}]})
check_items={exp_res: [{count: half}], "pk_name": collection_w.primary_field.name})
# drop p1 partition
p1.release()
@ -3420,7 +3465,7 @@ class TestQueryCount(TestcaseBase):
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
partition_names=[ct.default_partition_name],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: 0}]})
check_items={exp_res: [{count: 0}], "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_count_partition_duplicate(self):
@ -3447,7 +3492,8 @@ class TestQueryCount(TestcaseBase):
# count
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: ct.default_nb * 2}]}
check_items={exp_res: [{count: ct.default_nb * 2}],
"pk_name": collection_w.primary_field.name}
)
# delete some duplicate ids
@ -3455,7 +3501,8 @@ class TestQueryCount(TestcaseBase):
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
partition_names=[p1],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: ct.default_nb - delete_res.delete_count}]}
check_items={exp_res: [{count: ct.default_nb - delete_res.delete_count}],
"pk_name": collection_w.primary_field.name}
)
@pytest.mark.tags(CaseLabel.L1)
@ -3473,15 +3520,14 @@ class TestQueryCount(TestcaseBase):
collection_w = self.init_collection_general(insert_data=True, nb=tmp_nb)[0]
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: tmp_nb}]}
)
check_items={exp_res: [{count: tmp_nb}], "pk_name": collection_w.primary_field.name})
# new insert and growing count
df = cf.gen_default_dataframe_data(nb=tmp_nb, start=tmp_nb)
collection_w.insert(df)
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: tmp_nb * 2}]})
check_items={exp_res: [{count: tmp_nb * 2}], "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_count_during_handoff(self):
@ -3506,8 +3552,8 @@ class TestQueryCount(TestcaseBase):
kwargs={
"output_fields": [ct.default_count_output],
"check_task": CheckTasks.check_query_results,
"check_items": {exp_res: [{count: ct.default_nb}]}
})
"check_items": {exp_res: [{count: ct.default_nb}],
"pk_name": collection_w.primary_field.name}})
t_flush.start()
t_count.start()
@ -3534,7 +3580,8 @@ class TestQueryCount(TestcaseBase):
collection_w.delete(f"{ct.default_int64_field_name} in {[i for i in range(ct.default_nb)]}")
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: tmp_nb}]}
check_items={exp_res: [{count: tmp_nb}],
"pk_name": collection_w.primary_field.name}
)
# re-insert deleted ids [0, default_nb) with different vectors
@ -3542,7 +3589,8 @@ class TestQueryCount(TestcaseBase):
collection_w.insert(df_same)
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: ct.default_nb + tmp_nb}]}
check_items={exp_res: [{count: ct.default_nb + tmp_nb}],
"pk_name": collection_w.primary_field.name}
)
@pytest.mark.tags(CaseLabel.L1)
@ -3575,7 +3623,8 @@ class TestQueryCount(TestcaseBase):
# count after compact
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: tmp_nb * segment_num}]})
check_items={exp_res: [{count: tmp_nb * segment_num}],
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_count_compact_delete(self):
@ -3605,7 +3654,8 @@ class TestQueryCount(TestcaseBase):
collection_w.load()
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: ct.default_nb // 2}]}
check_items={exp_res: [{count: ct.default_nb // 2}],
"pk_name": collection_w.primary_field.name}
)
@pytest.mark.tags(CaseLabel.L2)
@ -3634,7 +3684,8 @@ class TestQueryCount(TestcaseBase):
kwargs={
"output_fields": [ct.default_count_output],
"check_task": CheckTasks.check_query_results,
"check_items": {exp_res: [{count: tmp_nb * 10}]}
"check_items": {exp_res: [{count: tmp_nb * 10}],
"pk_name": collection_w.primary_field.name}
})
t_compact.start()
@ -3655,11 +3706,13 @@ class TestQueryCount(TestcaseBase):
# count with expr
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: ct.default_nb}]})
check_items={exp_res: [{count: ct.default_nb}],
"pk_name": collection_w.primary_field.name})
collection_w.query(expr=default_term_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: 2}]})
check_items={exp_res: [{count: 2}],
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_count_expr_json(self):
@ -3687,7 +3740,8 @@ class TestQueryCount(TestcaseBase):
expression = f'{ct.default_json_field_name}["number"] < 100'
collection_w.query(expression, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: 50}]})
check_items={exp_res: [{count: 50}],
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_json_expr_on_search_n_query(self):
@ -3753,7 +3807,9 @@ class TestQueryCount(TestcaseBase):
for expr in query_exprs:
log.debug(f"query_expr: {expr}")
collection_w.query(expr=expr, output_fields=[count],
check_task=CheckTasks.check_query_results, check_items={exp_res: [{count: 10}]})
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: 10}],
"pk_name": collection_w.primary_field.name})
collection_w.search(data=search_data, anns_field=ct.default_float_vec_field_name,
param=search_param, limit=10, expr=expr,
check_task=CheckTasks.check_search_results,
@ -3764,7 +3820,9 @@ class TestQueryCount(TestcaseBase):
f'{json_embedded_object}["{json_embedded_object}"] in []']:
log.debug(f"query_expr: {expr}")
collection_w.query(expr=expr, output_fields=[count],
check_task=CheckTasks.check_query_results, check_items={exp_res: [{count: 0}]})
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: 0}],
"pk_name": collection_w.primary_field.name})
collection_w.search(data=search_data, anns_field=ct.default_float_vec_field_name,
param=search_param, limit=10, expr=expr,
check_task=CheckTasks.check_search_results,
@ -3783,8 +3841,8 @@ class TestQueryCount(TestcaseBase):
# only params offset is not considered pagination
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output], offset=10,
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: ct.default_nb}]}
)
check_items={exp_res: [{count: ct.default_nb}],
"pk_name": collection_w.primary_field.name})
# count with limit
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output], limit=10,
check_task=CheckTasks.err_res,
@ -3819,7 +3877,8 @@ class TestQueryCount(TestcaseBase):
collection_w_alias.insert(cf.gen_default_dataframe_data(start=ct.default_nb), partition_name=p_name)
collection_w_alias.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: ct.default_nb * 2}]})
check_items={exp_res: [{count: ct.default_nb * 2}],
"pk_name": collection_w.primary_field.name})
# release collection and alias drop partition
collection_w_alias.drop_partition(p_name, check_task=CheckTasks.err_res,
@ -3834,13 +3893,15 @@ class TestQueryCount(TestcaseBase):
assert res is False
collection_w_alias.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: ct.default_nb}]})
check_items={exp_res: [{count: ct.default_nb}],
"pk_name": collection_w.primary_field.name})
# alias delete and count
collection_w_alias.delete(f"{ct.default_int64_field_name} in {[i for i in range(ct.default_nb)]}")
collection_w_alias.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: 0}]})
check_items={exp_res: [{count: 0}],
"pk_name": collection_w.primary_field.name})
collection_w_alias.drop(check_task=CheckTasks.err_res,
check_items={ct.err_code: 1,
@ -3886,21 +3947,24 @@ class TestQueryCount(TestcaseBase):
collection_w.upsert(df_zero)
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: ct.default_nb}]})
check_items={exp_res: [{count: ct.default_nb}],
"pk_name": collection_w.primary_field.name})
# upsert new id and count
df_new = cf.gen_default_dataframe_data(nb=1, start=ct.default_nb)
collection_w.upsert(df_new)
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: ct.default_nb + 1}]})
check_items={exp_res: [{count: ct.default_nb + 1}],
"pk_name": collection_w.primary_field.name})
# upsert existed id and count
df_existed = cf.gen_default_dataframe_data(nb=1, start=10)
collection_w.upsert(df_existed)
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: ct.default_nb + 1}]})
check_items={exp_res: [{count: ct.default_nb + 1}],
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_count_upsert_duplicate(self):
@ -3927,21 +3991,24 @@ class TestQueryCount(TestcaseBase):
collection_w.upsert(df_existed)
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: tmp_nb}]}
check_items={exp_res: [{count: tmp_nb}],
"pk_name": collection_w.primary_field.name}
)
# delete id and count
delete_res, _ = collection_w.delete(default_term_expr)
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: tmp_nb - delete_res.delete_count}]})
check_items={exp_res: [{count: tmp_nb - delete_res.delete_count}],
"pk_name": collection_w.primary_field.name})
# upsert deleted id and count
df_deleted = cf.gen_default_dataframe_data(nb=delete_res.delete_count, start=0)
collection_w.upsert(df_deleted)
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: tmp_nb}]})
check_items={exp_res: [{count: tmp_nb}],
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_count_rename_collection(self):
@ -3959,7 +4026,8 @@ class TestQueryCount(TestcaseBase):
self.collection_wrap.init_collection(new_name)
self.collection_wrap.query(expr=default_expr, output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: ct.default_nb}]})
check_items={exp_res: [{count: ct.default_nb}],
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_count_disable_growing_segments(self):
@ -3978,7 +4046,8 @@ class TestQueryCount(TestcaseBase):
collection_w.insert(cf.gen_default_dataframe_data(nb=100))
collection_w.query(expr=default_expr, output_fields=[ct.default_count_output], ignore_growing=True,
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: 0}]})
check_items={exp_res: [{count: 0}],
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_count_expressions(self):
@ -4006,14 +4075,16 @@ class TestQueryCount(TestcaseBase):
# count with expr
collection_w.query(expr=expr, output_fields=[count],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: res}]})
check_items={exp_res: [{count: res}],
"pk_name": collection_w.primary_field.name})
# count agian with expr template
expr = cf.get_expr_from_template(expressions[1]).replace("&&", "and").replace("||", "or")
expr_params = cf.get_expr_params_from_template(expressions[1])
collection_w.query(expr=expr, expr_params=expr_params, output_fields=[count],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: res}]})
check_items={exp_res: [{count: res}],
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("bool_type", [True, False, "true", "false"])
@ -4048,7 +4119,8 @@ class TestQueryCount(TestcaseBase):
expression = f"{ct.default_bool_field_name} == {bool_type}"
collection_w.query(expr=expression, output_fields=[count],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: res}]})
check_items={exp_res: [{count: res}],
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_count_expression_auto_field(self):
@ -4074,12 +4146,16 @@ class TestQueryCount(TestcaseBase):
# count with expr
collection_w.query(expr=expr, output_fields=[count],
check_task=CheckTasks.check_query_results, check_items={exp_res: [{count: res}]})
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: res}],
"pk_name": collection_w.primary_field.name})
# count with expr and expr_params
expr = cf.get_expr_from_template(expressions[1]).replace("&&", "and").replace("||", "or")
expr_params = cf.get_expr_params_from_template(expressions[1])
collection_w.query(expr=expr, expr_params=expr_params, output_fields=[count],
check_task=CheckTasks.check_query_results, check_items={exp_res: [{count: res}]})
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: res}],
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L2)
def test_count_expression_all_datatype(self):
@ -4095,7 +4171,8 @@ class TestQueryCount(TestcaseBase):
expr = "int64 >= 0 && int32 >= 1999 && int16 >= 0 && int8 <= 0 && float <= 1999.0 && double >= 0"
collection_w.query(expr=expr, output_fields=[count],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: 1}]})
check_items={exp_res: [{count: 1}],
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_count_expression_comparative(self):
@ -4124,7 +4201,8 @@ class TestQueryCount(TestcaseBase):
expression = "int64_1 >= int64_2"
collection_w.query(expr=expression, output_fields=[count],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: res}]})
check_items={exp_res: [{count: res}],
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("index", ct.all_index_types[9:11])
@ -4146,16 +4224,19 @@ class TestQueryCount(TestcaseBase):
collection_w.load()
collection_w.query(expr=default_expr, output_fields=[count],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: ct.default_nb}]})
check_items={exp_res: [{count: ct.default_nb}],
"pk_name": collection_w.primary_field.name})
expr = "int64 > 50 && int64 < 100 && float < 75"
collection_w.query(expr=expr, output_fields=[count],
check_task=CheckTasks.check_query_results,
check_items={exp_res: [{count: 24}]})
check_items={exp_res: [{count: 24}],
"pk_name": collection_w.primary_field.name})
batch_size = 100
collection_w.query_iterator(batch_size=batch_size, expr=default_expr,
check_task=CheckTasks.check_query_iterator,
check_items={"count": ct.default_nb,
"batch_size": batch_size})
"batch_size": batch_size,
"pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.repeat(3)
@ -4175,7 +4256,8 @@ class TestQueryCount(TestcaseBase):
collection_w = self.init_collection_general(prefix, True, 200, partition_num=1, is_index=True)[0]
collection_w.query(expr='', output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": [{ct.default_count_output: 200}]})
check_items={"exp_res": [{ct.default_count_output: 200}],
"pk_name": collection_w.primary_field.name})
collection_w.release()
partition_w1, partition_w2 = collection_w.partitions
# load
@ -4190,10 +4272,12 @@ class TestQueryCount(TestcaseBase):
# search on collection, partition1, partition2
collection_w.query(expr='', output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": [{ct.default_count_output: 50}]})
check_items={"exp_res": [{ct.default_count_output: 50}],
"pk_name": collection_w.primary_field.name})
partition_w1.query(expr='', output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": [{ct.default_count_output: 50}]})
check_items={"exp_res": [{ct.default_count_output: 50}],
"pk_name": collection_w.primary_field.name})
vectors = [[random.random() for _ in range(ct.default_dim)] for _ in range(ct.default_nq)]
collection_w.search(vectors[:1], ct.default_float_vec_field_name, ct.default_search_params, 200,
partition_names=[partition_w2.name],
@ -4249,7 +4333,8 @@ class TestQueryNoneAndDefaultData(TestcaseBase):
term_expr = f'{ct.default_int64_field_name} in {int_values[:pos]}'
collection_w.query(term_expr, output_fields=[ct.default_int64_field_name, default_float_field_name],
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L0)
def test_query_by_expr_none_with_none_data(self, enable_dynamic_field, null_data_percent):
@ -4276,7 +4361,8 @@ class TestQueryNoneAndDefaultData(TestcaseBase):
term_expr = f''
collection_w.query(term_expr, output_fields=[ct.default_int64_field_name, default_float_field_name],
limit=pos, check_task=CheckTasks.check_query_results, check_items={exp_res: res})
limit=pos, check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L0)
def test_query_by_nullable_field_with_none_data(self):
@ -4298,7 +4384,8 @@ class TestQueryNoneAndDefaultData(TestcaseBase):
term_expr = f'{default_float_field_name} < {pos}'
collection_w.query(term_expr, output_fields=[ct.default_int64_field_name, default_float_field_name],
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L0)
def test_query_after_none_data_all_field_datatype(self, varchar_scalar_index, numeric_scalar_index,
@ -4351,7 +4438,8 @@ class TestQueryNoneAndDefaultData(TestcaseBase):
term_expr = f'0 <= {ct.default_int64_field_name} < {pos}'
collection_w.query(term_expr, output_fields=[ct.default_int64_field_name, ct.default_float_field_name],
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L0)
def test_query_default_value_with_insert(self, enable_dynamic_field):
@ -4379,7 +4467,8 @@ class TestQueryNoneAndDefaultData(TestcaseBase):
term_expr = f'{ct.default_int64_field_name} in {int_values[:pos]}'
# 2. query
collection_w.query(term_expr, output_fields=[ct.default_int64_field_name, default_float_field_name],
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_default_value_without_insert(self, enable_dynamic_field):
@ -4396,7 +4485,8 @@ class TestQueryNoneAndDefaultData(TestcaseBase):
term_expr = f'{ct.default_int64_field_name} > 0'
# 2. query
collection_w.query(term_expr, output_fields=[ct.default_int64_field_name, default_float_field_name],
check_task=CheckTasks.check_query_results, check_items={exp_res: []})
check_task=CheckTasks.check_query_results,
check_items={exp_res: [], "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L0)
def test_query_after_default_data_all_field_datatype(self, varchar_scalar_index, numeric_scalar_index):
@ -4449,7 +4539,8 @@ class TestQueryNoneAndDefaultData(TestcaseBase):
term_expr = f'0 <= {ct.default_int64_field_name} < {pos}'
# 5. query
collection_w.query(term_expr, output_fields=[ct.default_int64_field_name, ct.default_float_field_name],
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="issue #36003")
@ -4477,7 +4568,8 @@ class TestQueryNoneAndDefaultData(TestcaseBase):
term_expr = f'{ct.default_float_field_name} in [10.0]'
collection_w.query(term_expr, output_fields=[ct.default_int64_field_name, default_float_field_name],
limit=pos, check_task=CheckTasks.check_query_results, check_items={exp_res: res})
limit=pos, check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.tags(CaseLabel.GPU)
@ -4522,7 +4614,8 @@ class TestQueryNoneAndDefaultData(TestcaseBase):
term_expr = f'{ct.default_int64_field_name} in {int64_values[:pos]}'
# 5. query
collection_w.query(term_expr, output_fields=[ct.default_int64_field_name, ct.default_float_field_name],
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_iterator_with_none_data(self, null_data_percent):
@ -4544,6 +4637,7 @@ class TestQueryNoneAndDefaultData(TestcaseBase):
collection_w.query_iterator(batch_size, expr=expr,
check_task=CheckTasks.check_query_iterator,
check_items={"count": ct.default_nb,
"pk_name": collection_w.primary_field.name,
"batch_size": batch_size})
@pytest.mark.tags(CaseLabel.L1)
@ -4573,7 +4667,8 @@ class TestQueryNoneAndDefaultData(TestcaseBase):
term_expr = f'{ct.default_int64_field_name} in {int_values[:pos]}'
collection_w.query(term_expr, output_fields=[ct.default_int64_field_name, default_float_field_name],
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="issue #36538")
@ -4594,7 +4689,8 @@ class TestQueryNoneAndDefaultData(TestcaseBase):
default_value_fields={ct.default_string_field_name: "data"})[0]
collection_w.query(expr='', output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": [{ct.default_count_output: 200}]})
check_items={"exp_res": [{ct.default_count_output: 200}],
"pk_name": collection_w.primary_field.name})
collection_w.release()
partition_w1, partition_w2 = collection_w.partitions
# load
@ -4609,10 +4705,12 @@ class TestQueryNoneAndDefaultData(TestcaseBase):
# search on collection, partition1, partition2
collection_w.query(expr='', output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": [{ct.default_count_output: 50}]})
check_items={"exp_res": [{ct.default_count_output: 50}],
"pk_name": collection_w.primary_field.name})
partition_w1.query(expr='', output_fields=[ct.default_count_output],
check_task=CheckTasks.check_query_results,
check_items={"exp_res": [{ct.default_count_output: 50}]})
check_items={"exp_res": [{ct.default_count_output: 50}],
"pk_name": collection_w.primary_field.name})
vectors = [[random.random() for _ in range(ct.default_dim)] for _ in range(ct.default_nq)]
collection_w.search(vectors[:1], ct.default_float_vec_field_name, ct.default_search_params, 200,
partition_names=[partition_w2.name],
@ -7044,8 +7142,7 @@ class TestQueryFunction(TestcaseBase):
mixed_call_expr,
output_fields=output_fields,
check_task=CheckTasks.check_query_results,
check_items={exp_res: res},
)
check_items={exp_res: res, "pk_name": collection_w.primary_field.name})
@pytest.mark.tags(CaseLabel.L1)
def test_query_invalid(self):

View File

@ -45,6 +45,7 @@ class TestQueryIterator(TestcaseBase):
collection_w.query_iterator(batch_size, expr=expr,
check_task=CheckTasks.check_query_iterator,
check_items={"count": nb,
"pk_name": collection_w.primary_field.name,
"batch_size": batch_size})
# 3. query iterator with checkpoint file
iterator_cp_file = f"/tmp/it_{collection_w.name}_cp"
@ -103,6 +104,7 @@ class TestQueryIterator(TestcaseBase):
# 2. query iterator
collection_w.query_iterator(check_task=CheckTasks.check_query_iterator,
check_items={"count": ct.default_nb,
"pk_name": collection_w.primary_field.name,
"batch_size": ct.default_batch_size})
@pytest.mark.tags(CaseLabel.L2)
@ -124,6 +126,7 @@ class TestQueryIterator(TestcaseBase):
collection_w.query_iterator(batch_size, expr=expr, offset=offset,
check_task=CheckTasks.check_query_iterator,
check_items={"count": ct.default_nb - offset,
"pk_name": collection_w.primary_field.name,
"batch_size": batch_size})
@pytest.mark.tags(CaseLabel.L2)
@ -145,6 +148,7 @@ class TestQueryIterator(TestcaseBase):
output_fields=[ct.default_float_vec_field_name],
check_task=CheckTasks.check_query_iterator,
check_items={"count": ct.default_nb,
"pk_name": collection_w.primary_field.name,
"batch_size": batch_size})
@pytest.mark.tags(CaseLabel.L2)
@ -166,6 +170,7 @@ class TestQueryIterator(TestcaseBase):
collection_w.query_iterator(batch_size=batch_size, expr=expr, offset=offset,
check_task=CheckTasks.check_query_iterator,
check_items={"count": ct.default_nb - offset,
"pk_name": collection_w.primary_field.name,
"batch_size": batch_size})
@pytest.mark.tags(CaseLabel.L2)
@ -185,6 +190,7 @@ class TestQueryIterator(TestcaseBase):
collection_w.query_iterator(limit=limit, expr="", offset=offset,
check_task=CheckTasks.check_query_iterator,
check_items={"count": max(Count, 0),
"pk_name": collection_w.primary_field.name,
"batch_size": ct.default_batch_size})
@pytest.mark.tags(CaseLabel.L2)
@ -235,6 +241,7 @@ class TestQueryIterator(TestcaseBase):
check_task=CheckTasks.check_query_iterator,
check_items={"batch_size": batch_size,
"count": ct.default_nb,
"pk_name": collection_w.primary_field.name,
"exp_ids": insert_ids})
file_exist = os.path.isfile(iterator_cp_file)
assert file_exist is True, "The checkpoint exists if not iterator.close()"
@ -258,13 +265,17 @@ class TestQueryIterator(TestcaseBase):
exp_ids = sorted(insert_ids)
collection_w.query_iterator(batch_size, output_fields=[ct.default_string_field_name],
check_task=CheckTasks.check_query_iterator,
check_items={"batch_size": batch_size, "count": ct.default_nb, "exp_ids": exp_ids})
check_items={"batch_size": batch_size,
"pk_name": collection_w.primary_field.name,
"count": ct.default_nb, "exp_ids": exp_ids})
# 3. query with pagination
exp_ids = sorted(insert_ids)[offset:]
collection_w.query_iterator(batch_size, offset=offset, output_fields=[ct.default_string_field_name],
check_task=CheckTasks.check_query_iterator,
check_items={"batch_size": batch_size, "count": ct.default_nb - offset, "exp_ids": exp_ids})
check_items={"batch_size": batch_size,
"pk_name": collection_w.primary_field.name,
"count": ct.default_nb - offset, "exp_ids": exp_ids})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("primary_field", [ct.default_string_field_name, ct.default_int64_field_name])
@ -294,6 +305,7 @@ class TestQueryIterator(TestcaseBase):
# 2. query iterator
collection_w.query_iterator(check_task=CheckTasks.check_query_iterator,
check_items={"count": nb,
"pk_name": collection_w.primary_field.name,
"batch_size": ct.default_batch_size})
@pytest.mark.tags(CaseLabel.L2)

View File

@ -1804,7 +1804,8 @@ class TestUtilityAdvanced(TestcaseBase):
term_expr = f'{ct.default_int64_field_name} in {insert_res.primary_keys[:10]}'
res = df.iloc[:10, :1].to_dict('records')
collection_w.query(term_expr, check_task=CheckTasks.check_query_results,
check_items={'exp_res': res})
check_items={'exp_res': res,
"pk_name": collection_w.primary_field.name})
search_res_before, _ = collection_w.search(df[ct.default_float_vec_field_name][:1].to_list(),
ct.default_float_vec_field_name,
ct.default_search_params, ct.default_limit)
@ -1822,7 +1823,8 @@ class TestUtilityAdvanced(TestcaseBase):
# query and search from handoff segments
collection_w.query(term_expr, check_task=CheckTasks.check_query_results,
check_items={'exp_res': res})
check_items={'exp_res': res,
"pk_name": collection_w.primary_field.name})
search_res_after, _ = collection_w.search(df[ct.default_float_vec_field_name][:1].to_list(),
ct.default_float_vec_field_name,
ct.default_search_params, ct.default_limit)