[skip ci] change test case level (#3143)

Signed-off-by: godchen0212 <qingxiang.chen@zilliz.com>

Co-authored-by: Wang Xiangyu <xy.wang@zilliz.com>
chen qingxiang 2020-08-05 18:54:11 +08:00 committed by GitHub
parent d252df9050
commit 3970b982e6
8 changed files with 68 additions and 10 deletions
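For context, the @pytest.mark.level(2) markers added below only take effect if the suite's conftest registers the level marker and filters on it. Below is a minimal sketch of such a hook, assuming a --level command-line option that defaults to 1; the actual Milvus test conftest may wire this up differently.

# conftest.py -- minimal sketch, assuming a --level option defaulting to 1;
# the real Milvus test conftest may implement this differently.
import pytest

def pytest_addoption(parser):
    parser.addoption("--level", action="store", type=int, default=1,
                     help="run tests whose level marker is <= this value")

def pytest_configure(config):
    # Register the custom marker so pytest does not warn about it.
    config.addinivalue_line("markers", "level(n): mark a test with a run level")

def pytest_collection_modifyitems(config, items):
    max_level = config.getoption("--level")
    skip = pytest.mark.skip(reason="test level exceeds --level")
    for item in items:
        marker = item.get_closest_marker("level")
        level = marker.args[0] if marker and marker.args else 1
        if level > max_level:
            # Cases marked level(2) below are skipped unless --level is raised.
            item.add_marker(skip)

With a hook like this, a plain pytest run covers only level-1 cases, and pytest --level=2 brings the newly demoted cases back in.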

View File

@@ -122,6 +122,8 @@ class TestStatsBase:
assert stats["partitions"][0]["tag"] == "_default"
assert stats["partitions"][0]["row_count"] == nb
# TODO
@pytest.mark.level(2)
def test_get_collection_stats_after_delete(self, connect, collection):
'''
target: get row count with collection_stats
@@ -138,6 +140,8 @@ class TestStatsBase:
assert stats["partitions"][0]["segments"][0]["data_size"] > 0
assert stats["partitions"][0]["segments"][0]["index_name"] == "FLAT"
# TODO
@pytest.mark.level(2)
def test_get_collection_stats_after_compact_parts(self, connect, collection):
'''
target: get row count with collection_stats
@@ -160,6 +164,8 @@ class TestStatsBase:
# pdb.set_trace()
assert compact_before > compact_after
# TODO
@pytest.mark.level(2)
def test_get_collection_stats_after_compact_delete_one(self, connect, collection):
'''
target: get row count with collection_stats
@@ -219,6 +225,8 @@ class TestStatsBase:
if partition["tag"] in [tag, new_tag]:
assert partition["row_count"] == nb
# TODO
@pytest.mark.level(2)
def test_get_collection_stats_after_index_created(self, connect, collection, get_simple_index):
'''
target: test collection info after index created
@@ -233,6 +241,8 @@ class TestStatsBase:
assert stats["partitions"][0]["segments"][0]["row_count"] == nb
assert stats["partitions"][0]["segments"][0]["index_name"] == get_simple_index["index_type"]
# TODO
@pytest.mark.level(2)
def test_get_collection_stats_after_index_created_ip(self, connect, collection, get_simple_index):
'''
target: test collection info after index created
@@ -263,6 +273,8 @@ class TestStatsBase:
assert stats["partitions"][0]["segments"][0]["row_count"] == nb
assert stats["partitions"][0]["segments"][0]["index_name"] == get_jaccard_index["index_type"]
# TODO
@pytest.mark.level(2)
def test_get_collection_stats_after_create_different_index(self, connect, collection):
'''
target: test collection info after index created repeatedly
@@ -272,7 +284,7 @@ class TestStatsBase:
ids = connect.insert(collection, entities)
connect.flush([collection])
for index_type in ["IVF_FLAT", "IVF_SQ8"]:
connect.create_index(collection, field_name, {"index_type": index_type, "nlist": 1024, "metric_type": "L2"})
connect.create_index(collection, field_name, {"index_type": index_type, "params":{"nlist": 1024}, "metric_type": "L2"})
stats = connect.get_collection_stats(collection)
logging.getLogger().info(stats)
assert stats["partitions"][0]["segments"][0]["index_name"] == index_type
@@ -298,6 +310,8 @@ class TestStatsBase:
assert stats["partitions"][0]["segments"][0]["row_count"] == nb
connect.drop_collection(collection_list[i])
# TODO
@pytest.mark.level(2)
def test_collection_count_multi_collections_indexed(self, connect):
'''
target: test collection rows_count is correct or not with multiple collections of L2
@@ -314,9 +328,9 @@ class TestStatsBase:
res = connect.insert(collection_name, entities)
connect.flush(collection_list)
if i % 2:
connect.create_index(collection_name, field_name, {"index_type": "IVF_SQ8", "params": {"nlist": 1024}, "metric_type": "L2"})
connect.create_index(collection_name, field_name, {"index_type": "IVF_SQ8", "params":{"nlist": 1024}, "metric_type": "L2"})
else:
connect.create_index(collection_name, field_name, {"index_type": "IVF_FLAT", "params": {"nlist": 1024}, "metric_type": "L2"})
connect.create_index(collection_name, field_name, {"index_type": "IVF_FLAT","params":{ "nlist": 1024}, "metric_type": "L2"})
for i in range(collection_num):
stats = connect.get_collection_stats(collection_list[i])
assert stats["partitions"][0]["segments"][0]["row_count"] == nb
@@ -324,4 +338,4 @@ class TestStatsBase:
assert stats["partitions"][0]["segments"][0]["index_name"] == "IVF_SQ8"
else:
assert stats["partitions"][0]["segments"][0]["index_name"] == "IVF_FLAT"
connect.drop_collection(collection_list[i])
connect.drop_collection(collection_list[i])
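Besides the level markers, the hunks above also adjust the index definitions used by these stats tests: algorithm settings such as nlist now sit under a nested "params" key instead of at the top level of the index dict. The corrected shape, taken from the updated lines:

# Index parameter layout used by the updated tests: nlist is nested
# under "params" rather than passed at the top level of the dict.
index_param = {"index_type": "IVF_SQ8", "params": {"nlist": 1024}, "metric_type": "L2"}
connect.create_index(collection_name, field_name, index_param)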

View File

@@ -111,6 +111,8 @@ class TestGetBase:
with pytest.raises(Exception) as e:
res = connect.get_entity_by_id(collection, ids)
# TODO
@pytest.mark.level(2)
def test_get_entity_same_ids(self, connect, id_collection):
'''
target: test.get_entity_by_id, with the same ids
@@ -125,6 +127,8 @@ class TestGetBase:
assert len(res) == 1
assert_equal_vector(res[0].get(default_float_vec_field_name), entities[-1]["values"][0])
# TODO
@pytest.mark.level(2)
def test_get_entity_params_same_ids(self, connect, collection):
'''
target: test.get_entity_by_id, with the same ids
@@ -397,6 +401,8 @@ class TestGetBase:
for i in range(get_pos):
assert res[i] is None
# TODO
@pytest.mark.level(2)
def test_get_entities_after_delete_compact(self, connect, collection, get_pos):
'''
target: test.get_entity_by_id
@@ -464,6 +470,7 @@ class TestGetBase:
enable_flush(connect)
# TODO:
@pytest.mark.level(2)
def test_get_entities_after_delete_same_ids(self, connect, id_collection):
'''
target: test.get_entity_by_id

View File

@@ -533,6 +533,8 @@ class TestSearchBase:
res = connect.search(collection, query)
assert abs(np.sqrt(res[0]._distances[0]) - min_distance) <= gen_inaccuracy(res[0]._distances[0])
# TODO
@pytest.mark.level(2)
def test_search_distance_ip(self, connect, collection):
'''
target: search collection, and check the result: distance

View File

@@ -105,7 +105,9 @@ class TestCompactBase:
info = connect.get_collection_stats(collection)
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before == size_after)
# TODO
@pytest.mark.level(2)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_insert_and_compact(self, connect, collection):
'''
@@ -249,7 +251,9 @@ class TestCompactBase:
logging.getLogger().info(info["partitions"])
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before >= size_after)
# TODO
@pytest.mark.level(2)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_entity_and_compact_twice(self, connect, collection):
'''
@@ -306,6 +310,8 @@ class TestCompactBase:
size_after_twice = info["partitions"][0]["segments"][0]["data_size"]
assert(size_after == size_after_twice)
# TODO
@pytest.mark.level(2)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_compact_multi_collections(self, connect):
'''
@@ -449,6 +455,8 @@ class TestCompactBinary:
******************************************************************
"""
@pytest.mark.timeout(COMPACT_TIMEOUT)
# TODO
@pytest.mark.level(2)
def test_add_entity_and_compact(self, connect, binary_collection):
'''
target: test add binary vector and compact
@@ -467,7 +475,9 @@ class TestCompactBinary:
info = connect.get_collection_stats(binary_collection)
size_after = info["partitions"][0]["segments"][0]["data_size"]
assert(size_before == size_after)
# TODO
@pytest.mark.level(2)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_insert_and_compact(self, connect, binary_collection):
'''
@@ -540,7 +550,9 @@ class TestCompactBinary:
assert status.OK()
logging.getLogger().info(info["partitions"])
assert not info["partitions"][0]["segments"]
# TODO
@pytest.mark.level(2)
@pytest.mark.timeout(COMPACT_TIMEOUT)
def test_add_entity_and_compact_twice(self, connect, binary_collection):
'''

View File

@@ -87,6 +87,8 @@ class TestFlushBase:
# with pytest.raises(Exception) as e:
# connect.flush([collection])
# TODO
@pytest.mark.level(2)
def test_add_partition_flush(self, connect, id_collection):
'''
method: add entities into partition in collection, flush several times
@@ -106,6 +108,8 @@ class TestFlushBase:
res_count = connect.count_entities(id_collection)
assert res_count == nb * 2
# TODO
@pytest.mark.level(2)
def test_add_partitions_flush(self, connect, collection):
'''
method: add entities into partitions in collection, flush one
@@ -123,6 +127,8 @@ class TestFlushBase:
res = connect.count_entities(collection)
assert res == 2 * nb
# TODO
@pytest.mark.level(2)
def test_add_collections_flush(self, connect, collection):
'''
method: add entities into collections, flush one
@@ -143,6 +149,8 @@ class TestFlushBase:
res = connect.count_entities(collection_new)
assert res == nb
# TODO
@pytest.mark.level(2)
def test_add_collections_fields_flush(self, connect, collection, get_filter_field, get_vector_field):
'''
method: create collection with different fields, and add entities into collections, flush one
@@ -189,6 +197,8 @@ class TestFlushBase:
logging.getLogger().debug(res)
assert res
# TODO
@pytest.mark.level(2)
# TODO: stable case
def test_add_flush_auto(self, connect, id_collection):
'''
@@ -218,6 +228,8 @@ class TestFlushBase:
def same_ids(self, request):
yield request.param
# TODO
@pytest.mark.level(2)
def test_add_flush_same_ids(self, connect, id_collection, same_ids):
'''
method: add entities, with same ids, count(same ids) < 15, > 15

View File

@@ -294,6 +294,8 @@ class TestIndexBase:
for t in threads:
t.join()
# TODO
@pytest.mark.level(2)
def test_create_index_collection_not_existed_ip(self, connect, collection):
'''
target: test create index interface when collection name not existed
@@ -306,6 +308,8 @@ class TestIndexBase:
with pytest.raises(Exception) as e:
connect.create_index(collection, field_name, default_index)
# TODO
@pytest.mark.level(2)
@pytest.mark.timeout(BUILD_TIMEOUT)
def test_create_index_no_vectors_insert_ip(self, connect, collection, get_simple_index):
'''

View File

@@ -98,6 +98,8 @@ class TestCreateBase:
assert tag_name in tag_list
assert "_default" in tag_list
# TODO
@pytest.mark.level(2)
def test_create_partition_insert_default(self, connect, id_collection):
'''
target: test create partition, and insert vectors, check status returned
@@ -109,6 +111,8 @@ class TestCreateBase:
insert_ids = connect.insert(id_collection, entities, ids)
assert len(insert_ids) == len(ids)
# TODO
@pytest.mark.level(2)
def test_create_partition_insert_with_tag(self, connect, collection):
'''
target: test create partition, and insert vectors, check status returned
@@ -132,6 +136,8 @@ class TestCreateBase:
with pytest.raises(Exception) as e:
insert_ids = connect.insert(collection, entities, ids, partition_tag=tag_new)
# TODO
@pytest.mark.level(2)
def test_create_partition_insert_same_tags(self, connect, collection):
'''
target: test create partition, and insert vectors, check status returned

View File

@@ -211,9 +211,10 @@ def gen_default_fields(auto_id=False):
"fields": [
{"field": "int64", "type": DataType.INT64},
{"field": "float", "type": DataType.FLOAT},
{"field": default_float_vec_field_name, "type": DataType.FLOAT_VECTOR, "params": {"dim": dimension}}
{"field": default_float_vec_field_name, "type": DataType.FLOAT_VECTOR, "params": {"dim": dimension}},
],
"segment_row_count": segment_row_count
"segment_row_count": segment_row_count,
"auto_id" : True
}
if auto_id is True:
default_fields["auto_id"] = True
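Assembled from the new lines in this last hunk, the default schema dict now reads roughly as sketched below; dimension, segment_row_count, and default_float_vec_field_name are globals defined elsewhere in the test utilities, and the pre-existing auto_id branch underneath still re-applies the same flag when the caller passes auto_id=True.

# Reconstructed from the added lines above (a sketch, not the verbatim file):
default_fields = {
    "fields": [
        {"field": "int64", "type": DataType.INT64},
        {"field": "float", "type": DataType.FLOAT},
        {"field": default_float_vec_field_name, "type": DataType.FLOAT_VECTOR, "params": {"dim": dimension}},
    ],
    "segment_row_count": segment_row_count,
    "auto_id": True,
}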