Mirror of https://gitee.com/milvus-io/milvus.git (synced 2025-12-08 10:08:42 +08:00)
test: optimizing variable names (#34035)

Renames ct.all_float_vector_types to ct.append_vector_type and adds ct.all_dense_vector_types, which now drives the dense-vector parametrization of the range-search test.

Signed-off-by: elstic <hao.wang@zilliz.com>
This commit is contained in:
Parent: d9f986439d
Commit: 4e414fb7fc
@@ -364,10 +364,10 @@ def gen_collection_schema_all_datatype(description=ct.default_desc,
     else:
         multiple_dim_array.insert(0, dim)
         for i in range(len(multiple_dim_array)):
-            if ct.all_float_vector_types[i%3] != ct.sparse_vector:
-                fields.append(gen_float_vec_field(name=f"multiple_vector_{ct.all_float_vector_types[i%3]}",
+            if ct.append_vector_type[i%3] != ct.sparse_vector:
+                fields.append(gen_float_vec_field(name=f"multiple_vector_{ct.append_vector_type[i%3]}",
                                                   dim=multiple_dim_array[i],
-                                                  vector_data_type=ct.all_float_vector_types[i%3]))
+                                                  vector_data_type=ct.append_vector_type[i%3]))
             else:
                 # The field of a sparse vector cannot be dimensioned
                 fields.append(gen_float_vec_field(name=f"multiple_vector_{ct.sparse_vector}",
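The hunk above rotates through the renamed ct.append_vector_type list with i % 3, so each extra vector field in multiple_dim_array gets the FLOAT16, BFLOAT16, or SPARSE type in turn, and the sparse branch omits the dim argument. A minimal standalone sketch of that rotation, with illustrative names (plan_extra_vector_fields is not the suite's gen_collection_schema_all_datatype helper):

# Standalone sketch of the i % 3 rotation over the renamed type list.
APPEND_VECTOR_TYPE = ["FLOAT16_VECTOR", "BFLOAT16_VECTOR", "SPARSE_FLOAT_VECTOR"]

def plan_extra_vector_fields(multiple_dim_array):
    """Return (field_name, vector_type, dim) tuples; sparse fields carry no dim."""
    plan = []
    for i, dim in enumerate(multiple_dim_array):
        vec_type = APPEND_VECTOR_TYPE[i % 3]      # rotate through the three appended types
        if vec_type != "SPARSE_FLOAT_VECTOR":
            plan.append((f"multiple_vector_{vec_type}", vec_type, dim))
        else:
            # a sparse vector field cannot be dimensioned
            plan.append((f"multiple_vector_{vec_type}", vec_type, None))
    return plan

print(plan_extra_vector_fields([128, 128, 128, 64]))  # 4th field wraps back to FLOAT16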
@@ -719,7 +719,7 @@ def gen_dataframe_all_data_type(nb=ct.default_nb, dim=ct.default_dim, start=0, w
         df[ct.default_float_vec_field_name] = float_vec_values
     else:
         for i in range(len(multiple_dim_array)):
-            df[multiple_vector_field_name[i]] = gen_vectors(nb, multiple_dim_array[i], ct.all_float_vector_types[i%3])
+            df[multiple_vector_field_name[i]] = gen_vectors(nb, multiple_dim_array[i], ct.append_vector_type[i%3])

     if with_json is False:
         df.drop(ct.default_json_field_name, axis=1, inplace=True)
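In the dataframe path, each extra vector field becomes one DataFrame column whose values come from gen_vectors with the type picked by the same i % 3 rotation. A rough sketch of that shape, where fake_gen_vectors is an assumed stand-in for the suite's gen_vectors (the real helper's output format may differ):

import numpy as np
import pandas as pd

append_vector_type = ["FLOAT16_VECTOR", "BFLOAT16_VECTOR", "SPARSE_FLOAT_VECTOR"]

def fake_gen_vectors(nb, dim, vector_data_type):
    # dense types -> list of float lists; sparse -> list of {index: value} dicts
    if vector_data_type == "SPARSE_FLOAT_VECTOR":
        return [{0: 0.5, 7: 1.0} for _ in range(nb)]
    return [np.random.random(dim).tolist() for _ in range(nb)]

nb, multiple_dim_array = 4, [32, 32, 32]
multiple_vector_field_name = [f"multiple_vector_{t}" for t in append_vector_type]

df = pd.DataFrame({"int64": range(nb)})
for i in range(len(multiple_dim_array)):
    # one column per extra vector field, typed by the rotation
    df[multiple_vector_field_name[i]] = fake_gen_vectors(nb, multiple_dim_array[i],
                                                         append_vector_type[i % 3])
print(df.columns.tolist())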
@@ -757,7 +757,7 @@ def gen_general_list_all_data_type(nb=ct.default_nb, dim=ct.default_dim, start=0
         insert_list.append(float_vec_values)
     else:
         for i in range(len(multiple_dim_array)):
-            insert_list.append(gen_vectors(nb, multiple_dim_array[i], ct.all_float_vector_types[i%3]))
+            insert_list.append(gen_vectors(nb, multiple_dim_array[i], ct.append_vector_type[i%3]))

     if with_json is False:
         # index = insert_list.index(json_values)
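This is the column-oriented counterpart of the dataframe path: each field's values are appended as one list, and the insert payload is a list of columns. A simplified sketch under the same assumed fake_gen_vectors stand-in (not the suite's gen_general_list_all_data_type):

import random

append_vector_type = ["FLOAT16_VECTOR", "BFLOAT16_VECTOR", "SPARSE_FLOAT_VECTOR"]

def fake_gen_vectors(nb, dim, vector_data_type):
    if vector_data_type == "SPARSE_FLOAT_VECTOR":
        return [{0: random.random()} for _ in range(nb)]
    return [[random.random() for _ in range(dim)] for _ in range(nb)]

nb, multiple_dim_array = 4, [16, 16, 16]
insert_list = [list(range(nb))]                  # e.g. a primary-key column
for i in range(len(multiple_dim_array)):
    # append one column of vectors per extra field, typed by the rotation
    insert_list.append(fake_gen_vectors(nb, multiple_dim_array[i], append_vector_type[i % 3]))

print(len(insert_list), "columns,", nb, "rows each")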
@@ -802,7 +802,7 @@ def gen_default_rows_data_all_data_type(nb=ct.default_nb, dim=ct.default_dim, st
         else:
             for i in range(len(multiple_dim_array)):
                 dict[multiple_vector_field_name[i]] = gen_vectors(nb, multiple_dim_array[i],
-                                                                  ct.all_float_vector_types[i])[0]
+                                                                  ct.append_vector_type[i])[0]
     if len(multiple_dim_array) != 0:
         with open(ct.rows_all_data_type_file_path + f'_{partition_id}' + f'_dim{dim}.txt', 'wb') as json_file:
             pickle.dump(array, json_file)
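The rows variant then persists the generated rows with pickle to a per-partition, per-dim file. A small round-trip sketch of that step; the path and row contents here are illustrative, not the suite's ct.rows_all_data_type_file_path:

import os
import pickle
import tempfile

rows = [{"int64": i, "multiple_vector_BFLOAT16_VECTOR": [0.1] * 8} for i in range(3)]
partition_id, dim = 0, 8

path = os.path.join(tempfile.gettempdir(), f"rows_all_data_type_{partition_id}_dim{dim}.txt")
with open(path, "wb") as f:          # binary mode, despite the .txt suffix
    pickle.dump(rows, f)

with open(path, "rb") as f:
    assert pickle.load(f) == rows    # round-trip check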
@@ -1792,7 +1792,7 @@ def insert_data(collection_w, nb=ct.default_nb, is_binary=False, is_all_data_typ
                                                  multiple_vector_field_name=vector_name_list,
                                                  vector_data_type=vector_data_type,
                                                  auto_id=auto_id, primary_field=primary_field)
-        elif vector_data_type in ct.all_float_vector_types:
+        elif vector_data_type in ct.append_vector_type:
            default_data = gen_general_default_list_data(nb // num, dim=dim, start=start, with_json=with_json,
                                                         random_primary_key=random_primary_key,
                                                         multiple_dim_array=multiple_dim_array,
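insert_data dispatches on membership in the renamed list: when vector_data_type is one of the float16/bfloat16/sparse types, it takes the gen_general_default_list_data path shown above. A toy sketch of just that membership check; the other branches of insert_data are not shown in this hunk, so the first branch below is a placeholder assumption:

append_vector_type = ["FLOAT16_VECTOR", "BFLOAT16_VECTOR", "SPARSE_FLOAT_VECTOR"]

def choose_data_generator(vector_data_type):
    # placeholder: the real insert_data handles the default dense float path elsewhere
    if vector_data_type == "FLOAT_VECTOR":
        return "default dataframe path"
    # the branch changed by this hunk: float16 / bfloat16 / sparse rows
    elif vector_data_type in append_vector_type:
        return "gen_general_default_list_data path"
    raise ValueError(f"unsupported vector type: {vector_data_type}")

assert choose_data_generator("BFLOAT16_VECTOR") == "gen_general_default_list_data path"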
@@ -45,7 +45,8 @@ float_type = "FLOAT_VECTOR"
 float16_type = "FLOAT16_VECTOR"
 bfloat16_type = "BFLOAT16_VECTOR"
 sparse_vector = "SPARSE_FLOAT_VECTOR"
-all_float_vector_types = [float16_type, bfloat16_type, sparse_vector]
+append_vector_type = [float16_type, bfloat16_type, sparse_vector]
+all_dense_vector_types = [float_type, float16_type, bfloat16_type]
 default_sparse_vec_field_name = "sparse_vector"
 default_partition_name = "_default"
 default_resource_group_name = '__default_resource_group'
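Taken together, the vector-type constants read as below after this hunk; the values are copied from the diff, the comments describe how the two lists are used in the other hunks, and surrounding lines of the constants module are omitted:

float_type = "FLOAT_VECTOR"
float16_type = "FLOAT16_VECTOR"
bfloat16_type = "BFLOAT16_VECTOR"
sparse_vector = "SPARSE_FLOAT_VECTOR"

# renamed from all_float_vector_types: the types cycled through for appended (extra) vector fields
append_vector_type = [float16_type, bfloat16_type, sparse_vector]
# newly added: every dense vector type, used below to parametrize the range-search test
all_dense_vector_types = [float_type, float16_type, bfloat16_type]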
@@ -6961,7 +6961,7 @@ class TestCollectionRangeSearch(TestcaseBase):
     ******************************************************************
     """
     @pytest.mark.tags(CaseLabel.L0)
-    @pytest.mark.parametrize("vector_data_type", ["FLOAT_VECTOR", "FLOAT16_VECTOR", "BFLOAT16_VECTOR"])
+    @pytest.mark.parametrize("vector_data_type", ct.all_dense_vector_types)
     def test_range_search_default(self, index_type, metric, vector_data_type):
         """
         target: verify the range search returns correct results
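With this change, pytest derives the dense-type cases from the shared constant instead of a hardcoded list. A self-contained illustration; the ct class and test body are simplified stand-ins, not the real TestCollectionRangeSearch.test_range_search_default:

import pytest

class ct:  # stand-in for the suite's common_type constants
    all_dense_vector_types = ["FLOAT_VECTOR", "FLOAT16_VECTOR", "BFLOAT16_VECTOR"]

@pytest.mark.parametrize("vector_data_type", ct.all_dense_vector_types)
def test_range_search_default_sketch(vector_data_type):
    # pytest generates one case per dense vector type in the shared constant
    assert vector_data_type.endswith("_VECTOR")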