From b02b525196bdd0dba77d8764aa40603f45d71aed Mon Sep 17 00:00:00 2001
From: zhuwenxing
Date: Thu, 1 Feb 2024 09:59:05 +0800
Subject: [PATCH] test: refine debug log and error handle (#30406)

refine debug log and error handle

---------

Signed-off-by: zhuwenxing
---
 tests/python_client/chaos/checker.py             | 11 +++++++----
 tests/python_client/testcases/test_concurrent.py |  2 +-
 tests/scripts/ci_e2e_4am.sh                      |  4 ++--
 3 files changed, 10 insertions(+), 7 deletions(-)

diff --git a/tests/python_client/chaos/checker.py b/tests/python_client/chaos/checker.py
index 0eee7349cc..be834efc6f 100644
--- a/tests/python_client/chaos/checker.py
+++ b/tests/python_client/chaos/checker.py
@@ -34,7 +34,7 @@ def get_chaos_info():
         with open(constants.CHAOS_INFO_SAVE_PATH, 'r') as f:
             chaos_info = json.load(f)
     except Exception as e:
-        log.error(f"get_chaos_info error: {e}")
+        log.warn(f"get_chaos_info error: {e}")
         return None
     return chaos_info
 
@@ -106,7 +106,11 @@ class RequestRecords(metaclass=Singleton):
     def sink(self):
         if len(self.buffer) == 0:
             return
-        df = pd.DataFrame(self.buffer)
+        try:
+            df = pd.DataFrame(self.buffer)
+        except Exception as e:
+            log.error(f"convert buffer {self.buffer} to dataframe error: {e}")
+            return
         if not self.created_file:
             with request_lock:
                 df.to_parquet(self.file_name, engine='fastparquet')
@@ -187,7 +191,7 @@ class ResultAnalyzer:
     def show_result_table(self):
         table = PrettyTable()
         table.field_names = ['operation_name', 'before_chaos',
-                             f'during_chaos\n{self.chaos_start_time}~{self.recovery_time}',
+                             f'during_chaos: {self.chaos_start_time}~{self.recovery_time}',
                              'after_chaos']
         data = self.get_stage_success_rate()
         for operation, values in data.items():
@@ -380,7 +384,6 @@ class Checker:
             offset_ts = int(time.time() * self.scale)
             ts_data.append(offset_ts)
         data[0] = ts_data # set timestamp (ms) as int64
-        log.debug(f"insert data: {ts_data}")
         res, result = self.c_wrap.insert(data=data,
                                          partition_name=partition_name,
                                          timeout=timeout,
diff --git a/tests/python_client/testcases/test_concurrent.py b/tests/python_client/testcases/test_concurrent.py
index fdd9d98b5e..e34e9378aa 100644
--- a/tests/python_client/testcases/test_concurrent.py
+++ b/tests/python_client/testcases/test_concurrent.py
@@ -26,7 +26,7 @@ def get_all_collections():
             data = json.load(f)
             all_collections = data["all"]
     except Exception as e:
-        log.error(f"get_all_collections error: {e}")
+        log.warn(f"get_all_collections error: {e}")
         return [None]
     return all_collections
 
diff --git a/tests/scripts/ci_e2e_4am.sh b/tests/scripts/ci_e2e_4am.sh
index 0c225316bd..82a1751903 100755
--- a/tests/scripts/ci_e2e_4am.sh
+++ b/tests/scripts/ci_e2e_4am.sh
@@ -105,9 +105,9 @@ fi
 
 # Run concurrent test with 10 processes
 if [[ -n "${TEST_TIMEOUT:-}" ]]; then
-    timeout "${TEST_TIMEOUT}" pytest testcases/test_concurrent.py --host ${MILVUS_SERVICE_NAME} --port ${MILVUS_SERVICE_PORT} --count 10 -n 10 \
+    timeout "${TEST_TIMEOUT}" pytest testcases/test_concurrent.py --host ${MILVUS_SERVICE_NAME} --port ${MILVUS_SERVICE_PORT} --count 5 -n 5 \
     --html=${CI_LOG_PATH}/report_concurrent.html --self-contained-html
 else
-    pytest testcases/test_concurrent.py --host ${MILVUS_SERVICE_NAME} --port ${MILVUS_SERVICE_PORT} --count 10 -n 10 \
+    pytest testcases/test_concurrent.py --host ${MILVUS_SERVICE_NAME} --port ${MILVUS_SERVICE_PORT} --count 5 -n 5 \
     --html=${CI_LOG_PATH}/report_concurrent.html --self-contained-html
 fi
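
Note on the pattern above (not part of the patch itself): the diff downgrades failures on optional config reads to warnings and guards DataFrame construction so one malformed batch cannot crash the checker. The sketch below is a minimal standalone illustration of that pattern, assuming only the standard json/logging modules plus pandas with fastparquet installed; it uses the stdlib log.warning rather than the test suite's log.warn wrapper, and the names load_chaos_info, sink, buffer and file_name are illustrative stand-ins, not the suite's real helpers.

import json
import logging

import pandas as pd

log = logging.getLogger(__name__)


def load_chaos_info(path):
    # Reading an optional chaos-info file: absence or bad JSON is an expected,
    # recoverable condition, so log a warning and return None instead of raising.
    try:
        with open(path, "r") as f:
            return json.load(f)
    except Exception as e:
        log.warning(f"load_chaos_info error: {e}")
        return None


def sink(buffer, file_name):
    # Persisting collected request records: guard the DataFrame conversion so a
    # malformed batch is logged and skipped rather than aborting the whole run.
    if len(buffer) == 0:
        return
    try:
        df = pd.DataFrame(buffer)
    except Exception as e:
        log.error(f"convert buffer to dataframe error: {e}")
        return
    df.to_parquet(file_name, engine="fastparquet")

For example, sink([{"operation": "insert", "result": True}], "/tmp/records.parquet") writes a small parquet file, while sink with a non-tabular buffer only logs an error and returns.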