test: update test cases (#30777)

Signed-off-by: nico <cheng.yuan@zilliz.com>
nico 2024-03-02 11:01:13 +08:00 committed by GitHub
parent 0a2c255630
commit d8164c43d2
6 changed files with 84 additions and 75 deletions


@@ -1751,6 +1751,11 @@ def extract_vector_field_name_list(collection_w):
     schema_dict = collection_w.schema.to_dict()
     fields = schema_dict.get('fields')
     vector_name_list = []
+    for field in fields:
+        if str(field['type']) in ["101", "102", "103"]:
+            if field['name'] != ct.default_float_vec_field_name:
+                vector_name_list.append(field['name'])
     for field in fields:
         if str(field['type']) == 'DataType.FLOAT_VECTOR' \
                 or str(field['type']) == 'DataType.FLOAT16_VECTOR' \


@@ -1400,7 +1400,8 @@ class TestCollectionDataframe(TestcaseBase):
         # one field different type df
         mix_data = [(1, 2., [0.1, 0.2]), (2, 3., 4)]
         df = pd.DataFrame(data=mix_data, columns=list("ABC"))
-        error = {ct.err_code: 0, ct.err_msg: "The data in the same column must be of the same type"}
+        error = {ct.err_code: 1,
+                 ct.err_msg: "The Input data type is inconsistent with defined schema, please check it."}
         self.collection_wrap.construct_from_dataframe(c_name, df, primary_field='A', check_task=CheckTasks.err_res,
                                                       check_items=error)
@@ -3556,13 +3557,13 @@ class TestLoadPartition(TestcaseBase):
         4. load collection
         expected: No exception
         """
-        collection_w = self.init_collection_general(prefix)[0]
+        collection_w = self.init_collection_general(prefix, is_index=False)[0]
+        collection_w.create_index(default_search_field)
         partition_w1 = self.init_partition_wrap(collection_w, partition1)
         partition_w2 = self.init_partition_wrap(collection_w, partition2)
         partition_w1.load()
         partition_w1.load()
-        error = {ct.err_code: 65538,
-                 ct.err_msg: 'partition not loaded'}
+        error = {ct.err_code: 65538, ct.err_msg: 'partition not loaded'}
         collection_w.query(default_term_expr, partition_names=[partition2],
                            check_task=CheckTasks.err_res, check_items=error)
         collection_w.load()
@@ -3577,7 +3578,8 @@ class TestLoadPartition(TestcaseBase):
         4. load collection
         expected: No exception
         """
-        collection_w = self.init_collection_general(prefix)[0]
+        collection_w = self.init_collection_general(prefix, is_index=False)[0]
+        collection_w.create_index(default_search_field)
         partition_w1 = self.init_partition_wrap(collection_w, partition1)
         partition_w2 = self.init_partition_wrap(collection_w, partition2)
         partition_w1.load()
@@ -3594,7 +3596,8 @@ class TestLoadPartition(TestcaseBase):
         3. query on the partitions
         expected: No exception
         """
-        collection_w = self.init_collection_general(prefix)[0]
+        collection_w = self.init_collection_general(prefix, is_index=False)[0]
+        collection_w.create_index(default_search_field)
         partition_w1 = self.init_partition_wrap(collection_w, partition1)
         partition_w2 = self.init_partition_wrap(collection_w, partition2)
         partition_w1.load()
@@ -3612,7 +3615,8 @@ class TestLoadPartition(TestcaseBase):
         5. query on the collection
         expected: no exception
         """
-        collection_w = self.init_collection_general(prefix)[0]
+        collection_w = self.init_collection_general(prefix, is_index=False)[0]
+        collection_w.create_index(default_search_field)
         partition_w1 = self.init_partition_wrap(collection_w, partition1)
         partition_w2 = self.init_partition_wrap(collection_w, partition2)
         partition_w1.load()
@@ -3633,7 +3637,8 @@ class TestLoadPartition(TestcaseBase):
         3. load collection
         expected: no exception
         """
-        collection_w = self.init_collection_general(prefix)[0]
+        collection_w = self.init_collection_general(prefix, is_index=False)[0]
+        collection_w.create_index(default_search_field)
         partition_w1 = self.init_partition_wrap(collection_w, partition1)
         partition_w2 = self.init_partition_wrap(collection_w, partition2)
         partition_w1.load()
@@ -3651,7 +3656,8 @@ class TestLoadPartition(TestcaseBase):
         5. query on the collection
         expected: no exception
         """
-        collection_w = self.init_collection_general(prefix)[0]
+        collection_w = self.init_collection_general(prefix, is_index=False)[0]
+        collection_w.create_index(default_search_field)
         partition_w1 = self.init_partition_wrap(collection_w, partition1)
         partition_w2 = self.init_partition_wrap(collection_w, partition2)
         partition_w1.load()
@@ -3674,7 +3680,8 @@ class TestLoadPartition(TestcaseBase):
         4. search on the collection
         expected: no exception
         """
-        collection_w = self.init_collection_general(prefix)[0]
+        collection_w = self.init_collection_general(prefix, is_index=False)[0]
+        collection_w.create_index(default_search_field)
         partition_w1 = self.init_partition_wrap(collection_w, partition1)
         partition_w2 = self.init_partition_wrap(collection_w, partition2)
         partition_w1.load()
@@ -3693,7 +3700,8 @@ class TestLoadPartition(TestcaseBase):
         4. query on the partitions
         expected: no exception
         """
-        collection_w = self.init_collection_general(prefix)[0]
+        collection_w = self.init_collection_general(prefix, is_index=False)[0]
+        collection_w.create_index(default_search_field)
         partition_w1 = self.init_partition_wrap(collection_w, partition1)
         partition_w2 = self.init_partition_wrap(collection_w, partition2)
         partition_w1.load()
@@ -3714,14 +3722,14 @@ class TestLoadPartition(TestcaseBase):
         5. query on the partitions
         expected: no exception
         """
-        collection_w = self.init_collection_general(prefix)[0]
+        collection_w = self.init_collection_general(prefix, is_index=False)[0]
+        collection_w.create_index(default_search_field)
         partition_w1 = self.init_partition_wrap(collection_w, partition1)
         partition_w2 = self.init_partition_wrap(collection_w, partition2)
         partition_w1.load()
         partition_w1.release()
         partition_w2.release()
-        error = {ct.err_code: 65535,
-                 ct.err_msg: 'collection not loaded'}
+        error = {ct.err_code: 65535, ct.err_msg: 'collection not loaded'}
         collection_w.query(default_term_expr, partition_names=[partition1, partition2],
                            check_task=CheckTasks.err_res, check_items=error)
         collection_w.load()
@@ -3737,7 +3745,8 @@ class TestLoadPartition(TestcaseBase):
         4. query on the partition
         expected: no exception
         """
-        collection_w = self.init_collection_general(prefix)[0]
+        collection_w = self.init_collection_general(prefix, is_index=False)[0]
+        collection_w.create_index(default_search_field)
         partition_w1 = self.init_partition_wrap(collection_w, partition1)
         partition_w2 = self.init_partition_wrap(collection_w, partition2)
         partition_w1.load()
@@ -3758,7 +3767,8 @@ class TestLoadPartition(TestcaseBase):
         6. query on the collection
         expected: no exception
         """
-        collection_w = self.init_collection_general(prefix)[0]
+        collection_w = self.init_collection_general(prefix, is_index=False)[0]
+        collection_w.create_index(default_search_field)
         partition_w1 = self.init_partition_wrap(collection_w, partition1)
         partition_w2 = self.init_partition_wrap(collection_w, partition2)
         partition_w1.load()
@@ -3781,7 +3791,8 @@ class TestLoadPartition(TestcaseBase):
         4. load the partition
         expected: no exception
         """
-        collection_w = self.init_collection_general(prefix)[0]
+        collection_w = self.init_collection_general(prefix, is_index=False)[0]
+        collection_w.create_index(default_search_field)
         partition_w1 = self.init_partition_wrap(collection_w, partition1)
         partition_w2 = self.init_partition_wrap(collection_w, partition2)
         partition_w1.load()
@@ -3801,7 +3812,8 @@ class TestLoadPartition(TestcaseBase):
         4. load collection
         expected: no exception
         """
-        collection_w = self.init_collection_general(prefix)[0]
+        collection_w = self.init_collection_general(prefix, is_index=False)[0]
+        collection_w.create_index(default_search_field)
         partition_w1 = self.init_partition_wrap(collection_w, partition1)
         partition_w2 = self.init_partition_wrap(collection_w, partition2)
         partition_w1.load()
@@ -3821,7 +3833,8 @@ class TestLoadPartition(TestcaseBase):
         4. query on the partition
         expected: no exception
         """
-        collection_w = self.init_collection_general(prefix)[0]
+        collection_w = self.init_collection_general(prefix, is_index=False)[0]
+        collection_w.create_index(default_search_field)
         partition_w1 = self.init_partition_wrap(collection_w, partition1)
         partition_w2 = self.init_partition_wrap(collection_w, partition2)
         partition_w1.load()
@@ -3841,7 +3854,8 @@ class TestLoadPartition(TestcaseBase):
         6. query on the partition
         expected: no exception
         """
-        collection_w = self.init_collection_general(prefix)[0]
+        collection_w = self.init_collection_general(prefix, is_index=False)[0]
+        collection_w.create_index(default_search_field)
         partition_w1 = self.init_partition_wrap(collection_w, partition1)
         partition_w2 = self.init_partition_wrap(collection_w, partition2)
         partition_w1.load()
@@ -3865,7 +3879,8 @@ class TestLoadPartition(TestcaseBase):
         5. query on the collection
         expected: no exception
         """
-        collection_w = self.init_collection_general(prefix)[0]
+        collection_w = self.init_collection_general(prefix, is_index=False)[0]
+        collection_w.create_index(default_search_field)
         partition_w1 = self.init_partition_wrap(collection_w, partition1)
         partition_w2 = self.init_partition_wrap(collection_w, partition2)
         partition_w1.load()
@@ -3883,7 +3898,8 @@ class TestLoadPartition(TestcaseBase):
         3. query on the first partition
         expected: no exception
         """
-        collection_w = self.init_collection_general(prefix)[0]
+        collection_w = self.init_collection_general(prefix, is_index=False)[0]
+        collection_w.create_index(default_search_field)
         partition_w1 = self.init_partition_wrap(collection_w, partition1)
         partition_w2 = self.init_partition_wrap(collection_w, partition2)
         partition_w1.load()


@@ -108,7 +108,7 @@ class TestDeleteParams(TestcaseBase):
         """
         # init collection with tmp_nb default data
         collection_w = self.init_collection_general(prefix, nb=tmp_nb, insert_data=True)[0]
-        error = {ct.err_code: 0, ct.err_msg: "expr cannot be None"}
+        error = {ct.err_code: 1, ct.err_msg: "expr cannot be None"}
         collection_w.delete(expr=None, check_task=CheckTasks.err_res, check_items=error)

     @pytest.mark.tags(CaseLabel.L2)
@@ -121,7 +121,7 @@
         """
         # init collection with tmp_nb default data
         collection_w = self.init_collection_general(prefix, nb=tmp_nb, insert_data=True)[0]
-        error = {ct.err_code: 0, ct.err_msg: f"expr value {expr} is illegal"}
+        error = {ct.err_code: 1, ct.err_msg: f"expr value {expr} is illegal"}
         collection_w.delete(expr, check_task=CheckTasks.err_res, check_items=error)

     @pytest.mark.tags(CaseLabel.L2)
@@ -134,8 +134,8 @@
         """
         # init collection with tmp_nb default data
         collection_w = self.init_collection_general(prefix, nb=tmp_nb, insert_data=True)[0]
-        error = {ct.err_code: 1,
-                 ct.err_msg: f"failed to create expr plan, expr = {expr}"}
+        error = {ct.err_code: 1100,
+                 ct.err_msg: f"failed to create delete plan: cannot parse expression: {expr}"}
         collection_w.delete(expr, check_task=CheckTasks.err_res, check_items=error)

     @pytest.mark.tags(CaseLabel.L2)
@@ -241,7 +241,7 @@
         expr = f'{ct.default_int64_field_name} in {[0.0, 1.0]}'
         # Bad exception message
-        error = {ct.err_code: 1, ct.err_msg: "failed to create expr plan,"}
+        error = {ct.err_code: 1100, ct.err_msg: "failed to create delete plan: cannot parse expression"}
         collection_w.delete(expr=expr, check_task=CheckTasks.err_res, check_items=error)

     @pytest.mark.tags(CaseLabel.L2)
@@ -256,7 +256,7 @@
         expr = f'{ct.default_int64_field_name} in {[0, 1.0]}'
         # Bad exception message
-        error = {ct.err_code: 1, ct.err_msg: "failed to create expr plan"}
+        error = {ct.err_code: 1100, ct.err_msg: "failed to create delete plan: cannot parse expression"}
         collection_w.delete(expr=expr, check_task=CheckTasks.err_res, check_items=error)

     @pytest.mark.tags(CaseLabel.L0)
@@ -2081,6 +2081,7 @@ class TestDeleteComplexExpr(TestcaseBase):
         """
         # init collection with nb default data
         collection_w = self.init_collection_general(prefix, False)[0]
+        collection_w.release()

         # delete
         error = {ct.err_code: 101, ct.err_msg: "collection not loaded"}
@@ -2201,7 +2202,7 @@
         collection_w.load()

         # delete with expressions
-        error = {ct.err_code: 1, ct.err_msg: f"failed to create expr plan, expr = {expressions}"}
+        error = {ct.err_code: 1100, ct.err_msg: f"failed to create delete plan: cannot parse expression: {expressions}"}
         collection_w.delete(expressions, check_task=CheckTasks.err_res, check_items=error)

     @pytest.mark.tags(CaseLabel.L2)


@@ -429,10 +429,9 @@ class TestInsertParams(TestcaseBase):
         collection_w = self.init_collection_wrap(name=c_name)
         data = cf.gen_default_list_data(nb=100)
         data[0][1] = 1.0
-        error = {ct.err_code: 0,
-                 ct.err_msg: "The data in the same column must be of the same type"}
-        collection_w.insert(
-            data, check_task=CheckTasks.err_res, check_items=error)
+        error = {ct.err_code: 1,
+                 ct.err_msg: "The Input data type is inconsistent with defined schema, please check it."}
+        collection_w.insert(data, check_task=CheckTasks.err_res, check_items=error)


 class TestInsertOperation(TestcaseBase):
@@ -1325,14 +1324,12 @@ class TestInsertInvalid(TestcaseBase):
         method: insert int8 out of range
         expected: raise exception
         """
-        collection_w = self.init_collection_general(
-            prefix, is_all_data_type=True)[0]
+        collection_w = self.init_collection_general(prefix, is_all_data_type=True)[0]
         data = cf.gen_dataframe_all_data_type(nb=1)
         data[ct.default_int8_field_name] = [invalid_int8]
         error = {ct.err_code: 1100, 'err_msg': "The data type of field int8 doesn't match, "
                                                "expected: INT8, got INT64"}
-        collection_w.insert(
-            data, check_task=CheckTasks.err_res, check_items=error)
+        collection_w.insert(data, check_task=CheckTasks.err_res, check_items=error)

     @pytest.mark.tags(CaseLabel.L2)
     @pytest.mark.parametrize("invalid_int16", [-32769, 32768])
@@ -1342,14 +1339,12 @@ class TestInsertInvalid(TestcaseBase):
         method: insert int16 out of range
         expected: raise exception
         """
-        collection_w = self.init_collection_general(
-            prefix, is_all_data_type=True)[0]
+        collection_w = self.init_collection_general(prefix, is_all_data_type=True)[0]
         data = cf.gen_dataframe_all_data_type(nb=1)
         data[ct.default_int16_field_name] = [invalid_int16]
         error = {ct.err_code: 1100, 'err_msg': "The data type of field int16 doesn't match, "
                                                "expected: INT16, got INT64"}
-        collection_w.insert(
-            data, check_task=CheckTasks.err_res, check_items=error)
+        collection_w.insert(data, check_task=CheckTasks.err_res, check_items=error)

     @pytest.mark.tags(CaseLabel.L2)
     @pytest.mark.parametrize("invalid_int32", [-2147483649, 2147483648])
@@ -1359,14 +1354,12 @@ class TestInsertInvalid(TestcaseBase):
         method: insert int32 out of range
         expected: raise exception
         """
-        collection_w = self.init_collection_general(
-            prefix, is_all_data_type=True)[0]
+        collection_w = self.init_collection_general(prefix, is_all_data_type=True)[0]
         data = cf.gen_dataframe_all_data_type(nb=1)
         data[ct.default_int32_field_name] = [invalid_int32]
-        error = {ct.err_code: 1, 'err_msg': "The data type of field int16 doesn't match, "
+        error = {ct.err_code: 1, 'err_msg': "The data type of field int32 doesn't match, "
                                             "expected: INT32, got INT64"}
-        collection_w.insert(
-            data, check_task=CheckTasks.err_res, check_items=error)
+        collection_w.insert(data, check_task=CheckTasks.err_res, check_items=error)

     @pytest.mark.tags(CaseLabel.L2)
     @pytest.mark.skip("no error code provided now")


@@ -61,7 +61,7 @@ class TestQueryParams(TestcaseBase):
         """
         collection_w, entities = self.init_collection_general(prefix, insert_data=True, nb=10)[0:2]
         term_expr = f'{default_int_field_name} in {entities[:default_pos]}'
-        error = {ct.err_code: 65535, ct.err_msg: "cannot parse expression: int64 in .."}
+        error = {ct.err_code: 1100, ct.err_msg: "cannot parse expression: int64 in .."}
         collection_w.query(term_expr, check_task=CheckTasks.err_res, check_items=error)

     @pytest.mark.tags(CaseLabel.L0)
@@ -228,8 +228,8 @@
         expected: raise exception
         """
         collection_w, vectors = self.init_collection_general(prefix, insert_data=True)[0:2]
-        error = {ct.err_code: 65535, ct.err_msg: "cannot parse expression: 12-s, error: field s not exist"}
-        exprs = ["12-s", "中文", "a", " "]
+        error = {ct.err_code: 1100, ct.err_msg: "cannot parse expression"}
+        exprs = ["12-s", "中文", "a"]
         for expr in exprs:
             collection_w.query(expr, check_task=CheckTasks.err_res, check_items=error)
@@ -544,8 +544,8 @@
                  f'{ct.default_int64_field_name} in "in"',
                  f'{ct.default_int64_field_name} in (mn)']
         collection_w, vectors = self.init_collection_general(prefix, insert_data=True)[0:2]
-        error = {ct.err_code: 65535, ct.err_msg: "cannot parse expression: int64 in 1, "
+        error = {ct.err_code: 1100, ct.err_msg: "cannot parse expression: int64 in 1, "
                                                  "error: line 1:9 no viable alternative at input 'in1'"}
         for expr in exprs:
             collection_w.query(expr, check_task=CheckTasks.err_res, check_items=error)
@@ -571,9 +571,8 @@
         """
         collection_w = self.init_collection_wrap(cf.gen_unique_str(prefix))
         int_values = [[1., 2.], [1, 2.]]
-        error = {ct.err_code: 65535,
-                 ct.err_msg: "cannot parse expression: int64 in [1.0, 2.0], error: value '1.0' "
-                             "in list cannot be casted to Int64"}
+        error = {ct.err_code: 1100,
+                 ct.err_msg: "failed to create query plan: cannot parse expression: int64 in [1, 2.0]"}
         for values in int_values:
             term_expr = f'{ct.default_int64_field_name} in {values}'
             collection_w.query(term_expr, check_task=CheckTasks.err_res, check_items=error)
@@ -587,7 +586,7 @@
         """
         collection_w, vectors = self.init_collection_general(prefix, insert_data=True)[0:2]
         constants = [[1], (), {}]
-        error = {ct.err_code: 65535,
+        error = {ct.err_code: 1100,
                  ct.err_msg: "cannot parse expression: int64 in [[1]], error: value '[1]' in "
                              "list cannot be casted to Int64"}
         for constant in constants:
@@ -1018,8 +1017,7 @@
         # 3. query
         collection_w.load()
         expression = f"{expr_prefix}({json_field}['list'], {get_not_list})"
-        error = {ct.err_code: 65535, ct.err_msg: f"cannot parse expression: {expression}, "
-                                                 f"error: contains_any operation element must be an array"}
+        error = {ct.err_code: 1100, ct.err_msg: f"failed to create query plan: cannot parse expression: {expression}"}
         collection_w.query(expression, check_task=CheckTasks.err_res, check_items=error)

     @pytest.mark.tags(CaseLabel.L2)
@@ -1111,7 +1109,7 @@
         collection_w.load()
         expression = f"array_length({ct.default_float_array_field_name}) {op} 51"
         collection_w.query(expression, check_task=CheckTasks.err_res,
-                           check_items={ct.err_code: 65535,
+                           check_items={ct.err_code: 1100,
                                         ct.err_msg: "cannot parse expression: %s, error %s "
                                                     "is not supported" % (expression, op)})


@@ -2243,7 +2243,7 @@ class TestCollectionSearch(TestcaseBase):
         for vector_name in vector_name_list:
             collection_w.create_index(vector_name, default_index)
         # 3. create index on scalar field
-        scalar_index_params = {"index_type":scalar_index, "params": {}}
+        scalar_index_params = {"index_type": scalar_index, "params": {}}
         collection_w.create_index(ct.default_int64_field_name, scalar_index_params)
         collection_w.load()
         # 4. search
@@ -5194,8 +5194,7 @@ class TestSearchString(TestcaseBase):
         # 2. search
         log.info("test_search_string_field_is_primary_true: searching collection %s" %
                  collection_w.name)
-        vectors = [[random.random() for _ in range(dim)]
-                   for _ in range(default_nq)]
+        vectors = [[random.random() for _ in range(dim)] for _ in range(default_nq)]
         output_fields = [default_string_field_name, default_float_field_name]
         vector_list = cf.extract_vector_field_name_list(collection_w)
         for search_field in vector_list:
@@ -5300,10 +5299,9 @@
                             default_search_params, default_limit,
                             default_invaild_string_exp,
                             check_task=CheckTasks.err_res,
-                            check_items={"err_code": 65535,
-                                         "err_msg": "failed to create query plan: cannot parse expression: "
-                                                    "varchar >= 0, error: comparisons between VarChar, "
-                                                    "element_type: None and Int64 elementType: None are not supported"})
+                            check_items={"err_code": 1100,
+                                         "err_msg": "failed to create query plan: cannot "
                                                     "parse expression: varchar >= 0"})

     @pytest.mark.tags(CaseLabel.L2)
     @pytest.mark.parametrize("expression", cf.gen_normal_string_expressions([ct.default_string_field_name]))
@@ -6712,6 +6710,7 @@ class TestCollectionRangeSearch(TestcaseBase):
     @pytest.mark.tags(CaseLabel.L2)
     @pytest.mark.parametrize("range_filter", [1000, 1000.0])
     @pytest.mark.parametrize("radius", [0, 0.0])
+    @pytest.mark.skip()
     def test_range_search_multi_vector_fields(self, nq, dim, auto_id, is_flush, radius, range_filter, enable_dynamic_field):
         """
         target: test range search normal case
@@ -10553,7 +10552,6 @@ class TestCollectionHybridSearchValid(TestcaseBase):
         assert hybrid_search_0[0].ids == hybrid_search_1[0].ids
         assert hybrid_search_0[0].distances == hybrid_search_1[0].distances

     @pytest.mark.tags(CaseLabel.L2)
     @pytest.mark.parametrize("primary_field", [ct.default_int64_field_name, ct.default_string_field_name])
     def test_hybrid_search_overall_limit_larger_sum_each_limit(self, primary_field, dim,
@@ -10696,7 +10694,7 @@
                 "data": [[random.random() for _ in range(multiple_dim_array[i])] for _ in range(1)],
                 "anns_field": vector_name_list[i],
                 "param": {"metric_type": metric_type},
-                "limit": max_dim,
+                "limit": max_limit,
                 "expr": "int64 > 0"}
             req = AnnSearchRequest(**search_param)
             req_list.append(req)
@@ -10730,9 +10728,9 @@
         # 3. prepare search params
         req_list = []
         for i in range(len(vector_name_list)):
-            limit = max_dim
+            limit = max_limit
             if i == 1:
-                limit = min_dim
+                limit = 1
             search_param = {
                 "data": [[random.random() for _ in range(multiple_dim_array[i])] for _ in range(1)],
                 "anns_field": vector_name_list[i],
@@ -11471,7 +11469,7 @@
                                          default_json_field_name]
         output_fields = output_fields + vector_name_list
         hybrid_res = collection_w.hybrid_search(req_list, WeightedRanker(*weights), default_limit,
-                                                output_fields = output_fields,
+                                                output_fields=output_fields,
                                                 check_task=CheckTasks.check_search_results,
                                                 check_items={"nq": 1,
                                                              "ids": insert_ids,
@@ -11657,7 +11655,7 @@
         collection_w, _, _, insert_ids, time_stamp = \
             self.init_collection_general(prefix, True, primary_field=primary_field,
                                          multiple_dim_array=[default_dim, default_dim],
-                                         is_partition_key=primary_field)[0:5]
+                                         is_partition_key=ct.default_float_field_name)[0:5]
         # 2. extract vector field name
         vector_name_list = cf.extract_vector_field_name_list(collection_w)
         vector_name_list.append(ct.default_float_vec_field_name)
@@ -11740,7 +11738,6 @@
         res = collection_w.hybrid_search(req_list, WeightedRanker(*weights), 10)
         is_sorted_decrease = lambda lst: all(lst[i]['distance'] >= lst[i+1]['distance'] for i in range(len(lst)-1))
         assert is_sorted_decrease(res[0])
-        print(res)

     @pytest.mark.tags(CaseLabel.L1)
     def test_hybrid_search_result_order(self):
@@ -11772,4 +11769,3 @@
         res = collection_w.hybrid_search(req_list, WeightedRanker(*weights), 10)
         is_sorted_ascend = lambda lst: all(lst[i]['distance'] <= lst[i+1]['distance'] for i in range(len(lst)-1))
         assert is_sorted_ascend(res[0])
-        print(res)