From 030d8fb206e5254057aeffd47fceeae8ab0ecb48 Mon Sep 17 00:00:00 2001
From: zhuwenxing
Date: Fri, 19 Aug 2022 12:54:54 +0800
Subject: [PATCH] [test]Add testcase for verification (#18715)

Signed-off-by: zhuwenxing
Signed-off-by: zhuwenxing
---
 .../test_all_collections_after_chaos.py      | 98 +++++++++++++++++++
 .../chaos/testcases/test_data_persistence.py | 91 +++++++++++++++++
 .../chaos/testcases/test_get_collections.py  | 31 ++++++
 tests/python_client/utils/util_common.py     | 14 ++-
 4 files changed, 233 insertions(+), 1 deletion(-)
 create mode 100644 tests/python_client/chaos/testcases/test_all_collections_after_chaos.py
 create mode 100644 tests/python_client/chaos/testcases/test_data_persistence.py
 create mode 100644 tests/python_client/chaos/testcases/test_get_collections.py

diff --git a/tests/python_client/chaos/testcases/test_all_collections_after_chaos.py b/tests/python_client/chaos/testcases/test_all_collections_after_chaos.py
new file mode 100644
index 0000000000..d7d7632a14
--- /dev/null
+++ b/tests/python_client/chaos/testcases/test_all_collections_after_chaos.py
@@ -0,0 +1,98 @@
+import time
+import pytest
+
+from base.client_base import TestcaseBase
+from common import common_func as cf
+from common import common_type as ct
+from common.common_type import CaseLabel
+from utils.util_log import test_log as log
+from utils.util_common import get_collections
+
+class TestAllCollection(TestcaseBase):
+    """ Test case of end to end"""
+    @pytest.fixture(scope="function", params=get_collections())
+    def collection_name(self, request):
+        if request.param == [] or request.param == "":
+            pytest.skip("The collection name is invalid")
+        yield request.param
+
+    def teardown_method(self, method):
+        log.info(("*" * 35) + " teardown " + ("*" * 35))
+        log.info("[teardown_method] Start teardown test case %s..." %
+                 method.__name__)
+        log.info("skip drop collection")
+
+
+    @pytest.mark.tags(CaseLabel.L1)
+    def test_milvus_default(self, collection_name):
+        # create
+        name = collection_name
+        t0 = time.time()
+        collection_w = self.init_collection_wrap(name=name, active_trace=True)
+        tt = time.time() - t0
+        assert collection_w.name == name
+        entities = collection_w.num_entities
+        log.info(f"assert create collection: {tt}, init_entities: {entities}")
+
+        # insert
+        data = cf.gen_default_list_data()
+        t0 = time.time()
+        _, res = collection_w.insert(data)
+        tt = time.time() - t0
+        log.info(f"assert insert: {tt}")
+        assert res
+
+        # flush
+        t0 = time.time()
+        _, check_result = collection_w.flush(timeout=180)
+        assert check_result
+        assert collection_w.num_entities == len(data[0]) + entities
+        tt = time.time() - t0
+        entities = collection_w.num_entities
+        log.info(f"assert flush: {tt}, entities: {entities}")
+
+        # search
+        collection_w.load()
+        search_vectors = cf.gen_vectors(1, ct.default_dim)
+        search_params = {"metric_type": "L2", "params": {"nprobe": 16}}
+        t0 = time.time()
+        res_1, _ = collection_w.search(data=search_vectors,
+                                       anns_field=ct.default_float_vec_field_name,
+                                       param=search_params, limit=1)
+        tt = time.time() - t0
+        log.info(f"assert search: {tt}")
+        assert len(res_1) == 1
+        collection_w.release()
+
+        # index
+        d = cf.gen_default_list_data()
+        collection_w.insert(d)
+        log.info(f"assert index entities: {collection_w.num_entities}")
+        _index_params = {"index_type": "IVF_SQ8", "params": {"nlist": 64}, "metric_type": "L2"}
+        t0 = time.time()
+        index, _ = collection_w.create_index(field_name=ct.default_float_vec_field_name,
+                                             index_params=_index_params,
+                                             name=cf.gen_unique_str())
+        tt = time.time() - t0
+        log.info(f"assert index: {tt}")
+        assert len(collection_w.indexes) == 1
+
+        # search
+        t0 = time.time()
+        collection_w.load()
+        tt = time.time() - t0
+        log.info(f"assert load: {tt}")
+        search_vectors = cf.gen_vectors(1, ct.default_dim)
+        t0 = time.time()
+        res_1, _ = collection_w.search(data=search_vectors,
+                                       anns_field=ct.default_float_vec_field_name,
+                                       param=search_params, limit=1)
+        tt = time.time() - t0
+        log.info(f"assert search: {tt}")
+
+        # query
+        term_expr = f'{ct.default_int64_field_name} in [1001,1201,4999,2999]'
+        t0 = time.time()
+        res, _ = collection_w.query(term_expr)
+        tt = time.time() - t0
+        log.info(f"assert query result {len(res)}: {tt}")
diff --git a/tests/python_client/chaos/testcases/test_data_persistence.py b/tests/python_client/chaos/testcases/test_data_persistence.py
new file mode 100644
index 0000000000..97d7fc396f
--- /dev/null
+++ b/tests/python_client/chaos/testcases/test_data_persistence.py
@@ -0,0 +1,91 @@
+import time
+import pytest
+
+from base.client_base import TestcaseBase
+from common import common_func as cf
+from common import common_type as ct
+from common.common_type import CaseLabel
+from utils.util_log import test_log as log
+
+
+class TestDataPersistence(TestcaseBase):
+    """ Test case of end to end"""
+    def teardown_method(self, method):
+        log.info(("*" * 35) + " teardown " + ("*" * 35))
+        log.info("[teardown_method] Start teardown test case %s..." %
+                 method.__name__)
+        log.info("skip drop collection")
+
+    @pytest.mark.tags(CaseLabel.L3)
+    def test_milvus_default(self):
+        # create
+        name = "Hello_Milvus"
+        t0 = time.time()
+        collection_w = self.init_collection_wrap(name=name, active_trace=True)
+        tt = time.time() - t0
+        assert collection_w.name == name
+        entities = collection_w.num_entities
+        log.info(f"assert create collection: {tt}, init_entities: {entities}")
+
+        # insert
+        data = cf.gen_default_list_data()
+        t0 = time.time()
+        _, res = collection_w.insert(data)
+        tt = time.time() - t0
+        log.info(f"assert insert: {tt}")
+        assert res
+
+        # flush
+        t0 = time.time()
+        _, check_result = collection_w.flush(timeout=180)
+        assert check_result
+        assert collection_w.num_entities == len(data[0]) + entities
+        tt = time.time() - t0
+        entities = collection_w.num_entities
+        log.info(f"assert flush: {tt}, entities: {entities}")
+
+        # search
+        collection_w.load()
+        search_vectors = cf.gen_vectors(1, ct.default_dim)
+        search_params = {"metric_type": "L2", "params": {"nprobe": 16}}
+        t0 = time.time()
+        res_1, _ = collection_w.search(data=search_vectors,
+                                       anns_field=ct.default_float_vec_field_name,
+                                       param=search_params, limit=1)
+        tt = time.time() - t0
+        log.info(f"assert search: {tt}")
+        assert len(res_1) == 1
+        collection_w.release()
+
+        # index
+        d = cf.gen_default_list_data()
+        collection_w.insert(d)
+        log.info(f"assert index entities: {collection_w.num_entities}")
+        _index_params = {"index_type": "IVF_SQ8", "params": {"nlist": 64}, "metric_type": "L2"}
+        t0 = time.time()
+        index, _ = collection_w.create_index(field_name=ct.default_float_vec_field_name,
+                                             index_params=_index_params,
+                                             name=cf.gen_unique_str())
+        tt = time.time() - t0
+        log.info(f"assert index: {tt}")
+        assert len(collection_w.indexes) == 1
+
+        # search
+        t0 = time.time()
+        collection_w.load()
+        tt = time.time() - t0
+        log.info(f"assert load: {tt}")
+        search_vectors = cf.gen_vectors(1, ct.default_dim)
+        t0 = time.time()
+        res_1, _ = collection_w.search(data=search_vectors,
+                                       anns_field=ct.default_float_vec_field_name,
+                                       param=search_params, limit=1)
+        tt = time.time() - t0
+        log.info(f"assert search: {tt}")
+
+        # query
+        term_expr = f'{ct.default_int64_field_name} in [1001,1201,4999,2999]'
+        t0 = time.time()
+        res, _ = collection_w.query(term_expr)
+        tt = time.time() - t0
+        log.info(f"assert query result {len(res)}: {tt}")
diff --git a/tests/python_client/chaos/testcases/test_get_collections.py b/tests/python_client/chaos/testcases/test_get_collections.py
new file mode 100644
index 0000000000..55a3387cb1
--- /dev/null
+++ b/tests/python_client/chaos/testcases/test_get_collections.py
@@ -0,0 +1,31 @@
+import time
+import json
+from collections import defaultdict
+import pytest
+
+from base.client_base import TestcaseBase
+from common import common_func as cf
+from common import common_type as ct
+from deploy.common import get_collections
+from common.common_type import CaseLabel
+from utils.util_log import test_log as log
+
+
+class TestGetCollections(TestcaseBase):
+    """ Test case of getting all collections """
+
+    @pytest.mark.tags(CaseLabel.L1)
+    def test_get_collections_by_prefix(self,):
+        self._connect()
+        all_collections = self.utility_wrap.list_collections()[0]
+        all_collections = [c_name for c_name in all_collections if "Checker" in c_name]
+        log.info(f"find {len(all_collections)} collections:")
+        log.info(all_collections)
+        data = {
+            "all": all_collections,
+        }
+        with open("/tmp/ci_logs/all_collections.json", "w") as f:
+            f.write(json.dumps(data))
+        log.info(f"write {len(all_collections)} collections to /tmp/ci_logs/all_collections.json")
+        collections_in_json = get_collections()
+        assert len(all_collections) == len(collections_in_json)
diff --git a/tests/python_client/utils/util_common.py b/tests/python_client/utils/util_common.py
index 2b21048106..e2dcccb1d5 100644
--- a/tests/python_client/utils/util_common.py
+++ b/tests/python_client/utils/util_common.py
@@ -1,5 +1,6 @@
 from yaml import full_load
-
+import json
+from utils.util_log import test_log as log
 
 def gen_experiment_config(yaml):
     """load the yaml file of chaos experiment"""
@@ -51,6 +52,17 @@ def update_key_name(node, modify_k, modify_k_new):
     return node
 
 
+def get_collections():
+    try:
+        with open("/tmp/ci_logs/all_collections.json", "r") as f:
+            data = json.load(f)
+            collections = data["all"]
+    except Exception as e:
+        log.error(f"get_all_collections error: {e}")
+        return []
+    return collections
+
+
 if __name__ == "__main__":
     d = { "id" : "abcde",
           "key1" : "blah",
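How the pieces above fit together: test_get_collections.py snapshots the names of all "Checker" collections present in the cluster and writes them to /tmp/ci_logs/all_collections.json; get_collections() in util_common.py reads that file back (returning [] on any error), and test_all_collections_after_chaos.py uses the result to parametrize one insert/flush/index/search/query verification pass per recorded collection. Below is a minimal standalone sketch of that JSON hand-off, not part of the patch: the file path and the "all" key come from the patch, while the helper names and sample collection names are purely illustrative.

import json
from pathlib import Path

# Path used by the patch for the collection-name snapshot (assumption: same path reused here)
ALL_COLLECTIONS_JSON = Path("/tmp/ci_logs/all_collections.json")

def dump_collections(names):
    # Mirrors what test_get_collections.py does: persist the names as {"all": [...]}
    ALL_COLLECTIONS_JSON.parent.mkdir(parents=True, exist_ok=True)
    ALL_COLLECTIONS_JSON.write_text(json.dumps({"all": names}))

def load_collections():
    # Mirrors util_common.get_collections(): read the names back, fall back to [] on any error
    try:
        return json.loads(ALL_COLLECTIONS_JSON.read_text())["all"]
    except Exception:
        return []

if __name__ == "__main__":
    # Hypothetical collection names for illustration only
    dump_collections(["InsertChecker__example", "SearchChecker__example"])
    for name in load_collections():
        # Each recovered name would become one pytest param of TestAllCollection.collection_name
        print(name)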