From 4c773d25a2c40b0ffd54012fc8d525406c5e5d5b Mon Sep 17 00:00:00 2001 From: zhuwenxing Date: Fri, 28 Apr 2023 14:18:35 +0800 Subject: [PATCH] [test]Add vector to output field for query (#23708) (#23790) Signed-off-by: zhuwenxing --- .../deploy/scripts/action_after_reinstall.py | 4 ++-- .../python_client/deploy/scripts/action_after_upgrade.py | 4 ++-- .../deploy/scripts/action_before_reinstall.py | 8 ++++---- .../python_client/deploy/scripts/action_before_upgrade.py | 6 +++--- tests/python_client/deploy/scripts/utils.py | 8 +++++++- 5 files changed, 18 insertions(+), 12 deletions(-) diff --git a/tests/python_client/deploy/scripts/action_after_reinstall.py b/tests/python_client/deploy/scripts/action_after_reinstall.py index 3196b72b1a..d86efc5f29 100644 --- a/tests/python_client/deploy/scripts/action_after_reinstall.py +++ b/tests/python_client/deploy/scripts/action_after_reinstall.py @@ -13,7 +13,7 @@ def task_1(data_size, host): get_collections(prefix) load_and_search(prefix) release_collection(prefix) - create_collections_and_insert_data(prefix,data_size) + create_collections_and_insert_data(prefix,count=data_size) load_and_search(prefix) @@ -27,7 +27,7 @@ def task_2(data_zise, host): connections.connect(host=host, port=19530, timeout=60) get_collections(prefix) load_and_search(prefix) - create_collections_and_insert_data(prefix, data_zise) + create_collections_and_insert_data(prefix, count=data_zise) release_collection(prefix) create_index(prefix) load_and_search(prefix) diff --git a/tests/python_client/deploy/scripts/action_after_upgrade.py b/tests/python_client/deploy/scripts/action_after_upgrade.py index 9c453a4736..6b3c25a7d2 100644 --- a/tests/python_client/deploy/scripts/action_after_upgrade.py +++ b/tests/python_client/deploy/scripts/action_after_upgrade.py @@ -18,7 +18,7 @@ def task_1(data_size, host): assert len(col_list) == len(all_index_types) create_index(prefix) load_and_search(prefix) - create_collections_and_insert_data(prefix, data_size) + 
create_collections_and_insert_data(prefix, count=data_size) release_collection(prefix) create_index(prefix) load_and_search(prefix) @@ -35,7 +35,7 @@ def task_2(data_size, host): col_list = get_collections(prefix, check=True) assert len(col_list) == len(all_index_types) load_and_search(prefix) - create_collections_and_insert_data(prefix, data_size) + create_collections_and_insert_data(prefix, count=data_size) release_collection(prefix) create_index(prefix) load_and_search(prefix) diff --git a/tests/python_client/deploy/scripts/action_before_reinstall.py b/tests/python_client/deploy/scripts/action_before_reinstall.py index 6b78249682..1ed1af5217 100644 --- a/tests/python_client/deploy/scripts/action_before_reinstall.py +++ b/tests/python_client/deploy/scripts/action_before_reinstall.py @@ -11,10 +11,10 @@ def task_1(data_size, host): prefix = "task_1_" connections.connect(host=host, port=19530, timeout=60) get_collections(prefix) - create_collections_and_insert_data(prefix,data_size) + create_collections_and_insert_data(prefix,count=data_size) create_index(prefix) load_and_search(prefix) - create_collections_and_insert_data(prefix,data_size) + create_collections_and_insert_data(prefix,count=data_size) load_and_search(prefix) @@ -27,9 +27,9 @@ def task_2(data_size, host): prefix = "task_2_" connections.connect(host=host, port=19530, timeout=60) get_collections(prefix) - create_collections_and_insert_data(prefix, data_size) + create_collections_and_insert_data(prefix, count=data_size) create_index(prefix) - create_collections_and_insert_data(prefix, data_size) + create_collections_and_insert_data(prefix, count=data_size) create_index(prefix) load_and_search(prefix) diff --git a/tests/python_client/deploy/scripts/action_before_upgrade.py b/tests/python_client/deploy/scripts/action_before_upgrade.py index a0029f46cb..f62c24ad3f 100644 --- a/tests/python_client/deploy/scripts/action_before_upgrade.py +++ b/tests/python_client/deploy/scripts/action_before_upgrade.py @@ 
-15,7 +15,7 @@ def task_1(data_size, host): prefix = "task_1_" connections.connect(host=host, port=19530, timeout=60) get_collections(prefix) - create_collections_and_insert_data(prefix, data_size) + create_collections_and_insert_data(prefix, count=data_size) create_index(prefix) load_and_search(prefix) @@ -29,7 +29,7 @@ def task_2(data_size, host): prefix = "task_2_" connections.connect(host=host, port=19530, timeout=60) get_collections(prefix) - create_collections_and_insert_data(prefix, data_size) + create_collections_and_insert_data(prefix, count=data_size) create_index(prefix) load_and_search(prefix) create_collections_and_insert_data(prefix, flush=False, count=data_size) @@ -43,7 +43,7 @@ def task_3(data_size, host): prefix = "task_3_" connections.connect(host=host, port=19530, timeout=60) get_collections(prefix) - create_collections_and_insert_data(prefix, data_size) + create_collections_and_insert_data(prefix, count=data_size) create_index(prefix) load_and_search(prefix) diff --git a/tests/python_client/deploy/scripts/utils.py b/tests/python_client/deploy/scripts/utils.py index 471c4aac2b..cf7f4d5638 100644 --- a/tests/python_client/deploy/scripts/utils.py +++ b/tests/python_client/deploy/scripts/utils.py @@ -108,6 +108,12 @@ def create_collections_and_insert_data(prefix, flush=True, count=3000, collectio end_time = time.time() logger.info(f"[{j+1}/{times}] insert {nb} data, time: {end_time - start_time:.4f}") total_time += end_time - start_time + if j <= times - 3: + collection.flush() + collection.num_entities + if j == times - 3: + collection.compact() + logger.info(f"end insert, time: {total_time:.4f}") if flush: @@ -233,7 +239,7 @@ def load_and_search(prefix, replicas=1): logger.info("search latency: %.4fs" % (end_time - start_time)) t0 = time.time() expr = "count in [2,4,6,8]" - output_fields = ["count", "random_value"] + output_fields = ["count", "random_value", "float_vector"] res = c.query(expr, output_fields, timeout=120) sorted_res = sorted(res, 
key=lambda k: k['count']) for r in sorted_res: