From df4cb81f69d9836b3ac73188eebdeee28d01fc61 Mon Sep 17 00:00:00 2001 From: groot Date: Wed, 25 Dec 2019 03:20:16 -0600 Subject: [PATCH 1/5] If partition tag is similar, wrong partition is searched (#825) * #766 If partition tag is similar, wrong partition is searched * #766 If partition tag is similar, wrong partition is searched * reorder changelog id * typo --- CHANGELOG.md | 1 + core/src/utils/StringHelpFunctions.cpp | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 161225e636..5c6f2d82da 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,7 @@ Please mark all change in change log and use the issue from GitHub ## Feature - \#343 - Add Opentracing - \#665 - Support get/set config via CLI +- \#766 - If partition tag is similar, wrong partition is searched - \#771 - Add server build commit info interface - \#759 - Put C++ sdk out of milvus/core diff --git a/core/src/utils/StringHelpFunctions.cpp b/core/src/utils/StringHelpFunctions.cpp index af5b2e3b4d..0fa9ddfc1c 100644 --- a/core/src/utils/StringHelpFunctions.cpp +++ b/core/src/utils/StringHelpFunctions.cpp @@ -147,7 +147,7 @@ StringHelpFunctions::IsRegexMatch(const std::string& target_str, const std::stri // regex match std::regex pattern(pattern_str); std::smatch results; - if (std::regex_search(target_str, results, pattern)) { + if (std::regex_match(target_str, results, pattern)) { return true; } else { return false; From 03ec41070a815c04cd65f5de741ec7d7e4c77a86 Mon Sep 17 00:00:00 2001 From: quicksilver Date: Thu, 26 Dec 2019 10:33:05 +0800 Subject: [PATCH 2/5] Remove Jfrog Cache on Jenkins CI (#827) * specify multiple urls on sqlite_orm download stage * fix bug * fix bug * specify multiple urls on opentracing download stage * fix bug * specify multiple urls on download stage * delete jfrog cache * print jenkins enviroment variables * print jenkins enviroment variables * fix check_ccache.sh bug * debug * Update Jenkinfile * Add build enviroment resource limit on Jenkins CI * remove Jfrog cache build option --- ci/jenkins/Jenkinsfile | 4 +- ...vus-cpu-version-centos7-build-env-pod.yaml | 2 +- ...cpu-version-ubuntu18.04-build-env-pod.yaml | 2 +- ...vus-gpu-version-centos7-build-env-pod.yaml | 2 +- ...gpu-version-ubuntu18.04-build-env-pod.yaml | 2 +- ci/scripts/build.sh | 10 +- ci/scripts/check_ccache.sh | 4 +- core/build.sh | 10 +- core/cmake/ThirdPartyPackages.cmake | 625 ++++-------------- core/src/index/build.sh | 7 +- .../index/cmake/ThirdPartyPackagesCore.cmake | 244 ++----- .../thirdparty/faiss/ci/jenkins/Jenkinsfile | 579 ---------------- .../faiss/ci/jenkins/step/build.groovy | 35 - .../faiss/ci/jenkins/step/coverage.groovy | 5 - .../thirdparty/faiss/ci/scripts/build.sh | 127 ---- sdk/cmake/ThirdPartyPackages.cmake | 132 +--- 16 files changed, 230 insertions(+), 1560 deletions(-) delete mode 100644 core/src/index/thirdparty/faiss/ci/jenkins/Jenkinsfile delete mode 100644 core/src/index/thirdparty/faiss/ci/jenkins/step/build.groovy delete mode 100644 core/src/index/thirdparty/faiss/ci/jenkins/step/coverage.groovy delete mode 100755 core/src/index/thirdparty/faiss/ci/scripts/build.sh diff --git a/ci/jenkins/Jenkinsfile b/ci/jenkins/Jenkinsfile index 9c647146a6..ad2087f17c 100644 --- a/ci/jenkins/Jenkinsfile +++ b/ci/jenkins/Jenkinsfile @@ -133,8 +133,8 @@ pipeline { stage('Deploy to Development') { environment { - FROMAT_SEMVER = "${env.SEMVER}".replaceAll("\\.", "-") - FORMAT_OS_NAME = "${OS_NAME}".replaceAll("\\.", "-") + FROMAT_SEMVER = 
"${env.SEMVER}".replaceAll("\\.", "-").replaceAll("_", "-") + FORMAT_OS_NAME = "${OS_NAME}".replaceAll("\\.", "-").replaceAll("_", "-") HELM_RELEASE_NAME = "${env.PIPELINE_NAME}-${env.FROMAT_SEMVER}-${env.BUILD_NUMBER}-single-${FORMAT_OS_NAME}-${BINARY_VERSION}".toLowerCase() } diff --git a/ci/jenkins/pod/milvus-cpu-version-centos7-build-env-pod.yaml b/ci/jenkins/pod/milvus-cpu-version-centos7-build-env-pod.yaml index 47735937b6..cfbae6d9ce 100644 --- a/ci/jenkins/pod/milvus-cpu-version-centos7-build-env-pod.yaml +++ b/ci/jenkins/pod/milvus-cpu-version-centos7-build-env-pod.yaml @@ -25,7 +25,7 @@ spec: resources: limits: memory: "12Gi" - cpu: "4.0" + cpu: "6.0" requests: memory: "8Gi" cpu: "4.0" diff --git a/ci/jenkins/pod/milvus-cpu-version-ubuntu18.04-build-env-pod.yaml b/ci/jenkins/pod/milvus-cpu-version-ubuntu18.04-build-env-pod.yaml index f36569407a..4140d7a5ff 100644 --- a/ci/jenkins/pod/milvus-cpu-version-ubuntu18.04-build-env-pod.yaml +++ b/ci/jenkins/pod/milvus-cpu-version-ubuntu18.04-build-env-pod.yaml @@ -24,7 +24,7 @@ spec: resources: limits: memory: "12Gi" - cpu: "4.0" + cpu: "6.0" requests: memory: "8Gi" cpu: "4.0" diff --git a/ci/jenkins/pod/milvus-gpu-version-centos7-build-env-pod.yaml b/ci/jenkins/pod/milvus-gpu-version-centos7-build-env-pod.yaml index 55abdff5a3..4a7b970468 100644 --- a/ci/jenkins/pod/milvus-gpu-version-centos7-build-env-pod.yaml +++ b/ci/jenkins/pod/milvus-gpu-version-centos7-build-env-pod.yaml @@ -25,7 +25,7 @@ spec: resources: limits: memory: "12Gi" - cpu: "4.0" + cpu: "6.0" nvidia.com/gpu: 1 requests: memory: "8Gi" diff --git a/ci/jenkins/pod/milvus-gpu-version-ubuntu18.04-build-env-pod.yaml b/ci/jenkins/pod/milvus-gpu-version-ubuntu18.04-build-env-pod.yaml index 43a4152342..a739a344e0 100644 --- a/ci/jenkins/pod/milvus-gpu-version-ubuntu18.04-build-env-pod.yaml +++ b/ci/jenkins/pod/milvus-gpu-version-ubuntu18.04-build-env-pod.yaml @@ -24,7 +24,7 @@ spec: resources: limits: memory: "12Gi" - cpu: "4.0" + cpu: "6.0" nvidia.com/gpu: 1 requests: memory: "8Gi" diff --git a/ci/scripts/build.sh b/ci/scripts/build.sh index b440edff02..125677355b 100755 --- a/ci/scripts/build.sh +++ b/ci/scripts/build.sh @@ -19,13 +19,12 @@ FAISS_ROOT="" PRIVILEGES="OFF" CUSTOMIZATION="OFF" # default use origin faiss BUILD_COVERAGE="OFF" -USE_JFROG_CACHE="OFF" RUN_CPPLINT="OFF" GPU_VERSION="OFF" WITH_MKL="OFF" CUDA_COMPILER=/usr/local/cuda/bin/nvcc -while getopts "o:t:b:f:pgxulcjmh" arg +while getopts "o:t:b:f:pgxulcmh" arg do case $arg in o) @@ -59,9 +58,6 @@ do c) BUILD_COVERAGE="ON" ;; - j) - USE_JFROG_CACHE="ON" - ;; m) WITH_MKL="ON" ;; @@ -79,12 +75,11 @@ parameter: -u: building unit test options(default: OFF) -l: run cpplint, clang-format and clang-tidy(default: OFF) -c: code coverage(default: OFF) --j: use jfrog cache build directory(default: OFF) -m: build with MKL(default: OFF) -h: help usage: -./build.sh -o \${INSTALL_PREFIX} -t \${BUILD_TYPE} -b \${CORE_BUILD_DIR} -f \${FAISS_ROOT} [-p] [-g] [-x] [-u] [-l] [-c] [-j] [-m] [-h] +./build.sh -o \${INSTALL_PREFIX} -t \${BUILD_TYPE} -b \${CORE_BUILD_DIR} -f \${FAISS_ROOT} [-p] [-g] [-x] [-u] [-l] [-c] [-m] [-h] " exit 0 ;; @@ -112,7 +107,6 @@ CMAKE_CMD="cmake \ -DCUSTOMIZATION=${CUSTOMIZATION} \ -DBUILD_UNIT_TEST=${BUILD_UNITTEST} \ -DBUILD_COVERAGE=${BUILD_COVERAGE} \ --DUSE_JFROG_CACHE=${USE_JFROG_CACHE} \ -DFAISS_ROOT=${FAISS_ROOT} \ -DFAISS_WITH_MKL=${WITH_MKL} \ -DArrow_SOURCE=AUTO \ diff --git a/ci/scripts/check_ccache.sh b/ci/scripts/check_ccache.sh index c9c03ef149..076738ce2f 100755 --- 
a/ci/scripts/check_ccache.sh +++ b/ci/scripts/check_ccache.sh @@ -57,8 +57,8 @@ check_ccache() { fi } -if [[ -n "${CHANGE_BRANCH}" && "${BRANCH_NAME}" =~ "PR-" ]];then - check_ccache ${CHANGE_BRANCH} +if [[ -n "${CHANGE_TARGET}" && "${BRANCH_NAME}" =~ "PR-" ]];then + check_ccache ${CHANGE_TARGET} check_ccache ${BRANCH_NAME} fi diff --git a/core/build.sh b/core/build.sh index f1a9f9406d..5239e7060c 100755 --- a/core/build.sh +++ b/core/build.sh @@ -8,7 +8,6 @@ MAKE_CLEAN="OFF" BUILD_COVERAGE="OFF" DB_PATH="/tmp/milvus" PROFILING="OFF" -USE_JFROG_CACHE="OFF" RUN_CPPLINT="OFF" CUSTOMIZATION="OFF" # default use ori faiss CUDA_COMPILER=/usr/local/cuda/bin/nvcc @@ -19,7 +18,7 @@ FAISS_SOURCE="BUNDLED" WITH_PROMETHEUS="ON" FIU_ENABLE="OFF" -while getopts "p:d:t:f:ulrcgjhxzmei" arg; do +while getopts "p:d:t:f:ulrcghxzmei" arg; do case $arg in p) INSTALL_PREFIX=$OPTARG @@ -53,9 +52,6 @@ while getopts "p:d:t:f:ulrcgjhxzmei" arg; do z) PROFILING="ON" ;; - j) - USE_JFROG_CACHE="ON" - ;; x) CUSTOMIZATION="ON" ;; @@ -86,7 +82,6 @@ parameter: -r: remove previous build directory(default: OFF) -c: code coverage(default: OFF) -z: profiling(default: OFF) --j: use jfrog cache build directory(default: OFF) -g: build GPU version(default: OFF) -m: build with MKL(default: OFF) -e: build without prometheus(default: OFF) @@ -94,7 +89,7 @@ parameter: -h: help usage: -./build.sh -p \${INSTALL_PREFIX} -t \${BUILD_TYPE} -f \${FAISS_ROOT} [-u] [-l] [-r] [-c] [-z] [-j] [-g] [-m] [-e] [-h] +./build.sh -p \${INSTALL_PREFIX} -t \${BUILD_TYPE} -f \${FAISS_ROOT} [-u] [-l] [-r] [-c] [-z] [-g] [-m] [-e] [-h] " exit 0 ;; @@ -125,7 +120,6 @@ CMAKE_CMD="cmake \ -DBUILD_COVERAGE=${BUILD_COVERAGE} \ -DMILVUS_DB_PATH=${DB_PATH} \ -DMILVUS_ENABLE_PROFILING=${PROFILING} \ --DUSE_JFROG_CACHE=${USE_JFROG_CACHE} \ -DCUSTOMIZATION=${CUSTOMIZATION} \ -DMILVUS_GPU_VERSION=${GPU_VERSION} \ -DFAISS_WITH_MKL=${WITH_MKL} \ diff --git a/core/cmake/ThirdPartyPackages.cmake b/core/cmake/ThirdPartyPackages.cmake index 00be339cc2..6076c12157 100644 --- a/core/cmake/ThirdPartyPackages.cmake +++ b/core/cmake/ThirdPartyPackages.cmake @@ -131,43 +131,6 @@ endif (UNIX) # thirdparty directory set(THIRDPARTY_DIR "${MILVUS_SOURCE_DIR}/thirdparty") -# ---------------------------------------------------------------------- -# JFrog -if (NOT DEFINED USE_JFROG_CACHE) - set(USE_JFROG_CACHE "OFF") -endif () -if (USE_JFROG_CACHE STREQUAL "ON") - if (DEFINED ENV{JFROG_ARTFACTORY_URL}) - set(JFROG_ARTFACTORY_URL "$ENV{JFROG_ARTFACTORY_URL}") - endif () - if (NOT DEFINED JFROG_ARTFACTORY_URL) - message(FATAL_ERROR "JFROG_ARTFACTORY_URL is not set") - endif () - if (UBUNTU_FOUND) - set(JFROG_ARTFACTORY_CACHE_URL "${JFROG_ARTFACTORY_URL}/milvus/thirdparty/cache/${CMAKE_OS_NAME}/${UBUNTU_VERSION}/${MILVUS_BUILD_ARCH}/${BUILD_TYPE}") - else () - set(JFROG_ARTFACTORY_CACHE_URL "${JFROG_ARTFACTORY_URL}/milvus/thirdparty/cache/${CMAKE_OS_NAME}/${MILVUS_BUILD_ARCH}/${BUILD_TYPE}") - endif () - if (DEFINED ENV{JFROG_USER_NAME}) - set(JFROG_USER_NAME "$ENV{JFROG_USER_NAME}") - endif () - if (NOT DEFINED JFROG_USER_NAME) - message(FATAL_ERROR "JFROG_USER_NAME is not set") - endif () - if (DEFINED ENV{JFROG_PASSWORD}) - set(JFROG_PASSWORD "$ENV{JFROG_PASSWORD}") - endif () - if (NOT DEFINED JFROG_PASSWORD) - message(FATAL_ERROR "JFROG_PASSWORD is not set") - endif () - - set(THIRDPARTY_PACKAGE_CACHE "${THIRDPARTY_DIR}/cache") - if (NOT EXISTS ${THIRDPARTY_PACKAGE_CACHE}) - message(STATUS "Will create cached directory: ${THIRDPARTY_PACKAGE_CACHE}") - file(MAKE_DIRECTORY 
${THIRDPARTY_PACKAGE_CACHE}) - endif () -endif () - macro(resolve_dependency DEPENDENCY_NAME) if (${DEPENDENCY_NAME}_SOURCE STREQUAL "AUTO") find_package(${DEPENDENCY_NAME} MODULE) @@ -400,48 +363,19 @@ macro(build_gtest) "${GTEST_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}gmock${CMAKE_STATIC_LIBRARY_SUFFIX}" ) - if (USE_JFROG_CACHE STREQUAL "ON") - set(GTEST_CACHE_PACKAGE_NAME "googletest_${GTEST_MD5}.tar.gz") - set(GTEST_CACHE_URL "${JFROG_ARTFACTORY_CACHE_URL}/${GTEST_CACHE_PACKAGE_NAME}") - set(GTEST_CACHE_PACKAGE_PATH "${THIRDPARTY_PACKAGE_CACHE}/${GTEST_CACHE_PACKAGE_NAME}") - - file(DOWNLOAD ${GTEST_CACHE_URL} ${GTEST_CACHE_PACKAGE_PATH} STATUS status) - list(GET status 0 status_code) - message(STATUS "DOWNLOADING FROM ${GTEST_CACHE_URL} TO ${GTEST_CACHE_PACKAGE_PATH}. STATUS = ${status_code}") - if (NOT status_code EQUAL 0) - ExternalProject_Add(googletest_ep - URL - ${GTEST_SOURCE_URL} - BUILD_COMMAND - ${MAKE} - ${MAKE_BUILD_ARGS} - BUILD_BYPRODUCTS - ${GTEST_STATIC_LIB} - ${GTEST_MAIN_STATIC_LIB} - ${GMOCK_STATIC_LIB} - CMAKE_ARGS - ${GTEST_CMAKE_ARGS} - ${EP_LOG_OPTIONS}) - - ExternalProject_Create_Cache(googletest_ep ${GTEST_CACHE_PACKAGE_PATH} "${CMAKE_CURRENT_BINARY_DIR}/googletest_ep-prefix" ${JFROG_USER_NAME} ${JFROG_PASSWORD} ${GTEST_CACHE_URL}) - else () - ExternalProject_Use_Cache(googletest_ep ${GTEST_CACHE_PACKAGE_PATH} ${CMAKE_CURRENT_BINARY_DIR}) - endif () - else () - ExternalProject_Add(googletest_ep - URL - ${GTEST_SOURCE_URL} - BUILD_COMMAND - ${MAKE} - ${MAKE_BUILD_ARGS} - BUILD_BYPRODUCTS - ${GTEST_STATIC_LIB} - ${GTEST_MAIN_STATIC_LIB} - ${GMOCK_STATIC_LIB} - CMAKE_ARGS - ${GTEST_CMAKE_ARGS} - ${EP_LOG_OPTIONS}) - endif () + ExternalProject_Add(googletest_ep + URL + ${GTEST_SOURCE_URL} + BUILD_COMMAND + ${MAKE} + ${MAKE_BUILD_ARGS} + BUILD_BYPRODUCTS + ${GTEST_STATIC_LIB} + ${GTEST_MAIN_STATIC_LIB} + ${GMOCK_STATIC_LIB} + CMAKE_ARGS + ${GTEST_CMAKE_ARGS} + ${EP_LOG_OPTIONS}) # The include directory must exist before it is referenced by a target. file(MAKE_DIRECTORY "${GTEST_INCLUDE_DIR}") @@ -495,52 +429,19 @@ macro(build_mysqlpp) "CXXFLAGS=${EP_CXX_FLAGS}" "LDFLAGS=-pthread") - if (USE_JFROG_CACHE STREQUAL "ON") - set(MYSQLPP_CACHE_PACKAGE_NAME "mysqlpp_${MYSQLPP_MD5}.tar.gz") - set(MYSQLPP_CACHE_URL "${JFROG_ARTFACTORY_CACHE_URL}/${MYSQLPP_CACHE_PACKAGE_NAME}") - set(MYSQLPP_CACHE_PACKAGE_PATH "${THIRDPARTY_PACKAGE_CACHE}/${MYSQLPP_CACHE_PACKAGE_NAME}") - - execute_process(COMMAND wget -q --method HEAD ${MYSQLPP_CACHE_URL} RESULT_VARIABLE return_code) - message(STATUS "Check the remote file ${MYSQLPP_CACHE_URL}. return code = ${return_code}") - if (NOT return_code EQUAL 0) - externalproject_add(mysqlpp_ep - URL - ${MYSQLPP_SOURCE_URL} - ${EP_LOG_OPTIONS} - CONFIGURE_COMMAND - "./configure" - ${MYSQLPP_CONFIGURE_ARGS} - BUILD_COMMAND - ${MAKE} ${MAKE_BUILD_ARGS} - BUILD_IN_SOURCE - 1 - BUILD_BYPRODUCTS - ${MYSQLPP_SHARED_LIB}) - - ExternalProject_Create_Cache(mysqlpp_ep ${MYSQLPP_CACHE_PACKAGE_PATH} "${CMAKE_CURRENT_BINARY_DIR}/mysqlpp_ep-prefix" ${JFROG_USER_NAME} ${JFROG_PASSWORD} ${MYSQLPP_CACHE_URL}) - else () - file(DOWNLOAD ${MYSQLPP_CACHE_URL} ${MYSQLPP_CACHE_PACKAGE_PATH} STATUS status) - list(GET status 0 status_code) - message(STATUS "DOWNLOADING FROM ${MYSQLPP_CACHE_URL} TO ${MYSQLPP_CACHE_PACKAGE_PATH}. 
STATUS = ${status_code}") - if (status_code EQUAL 0) - ExternalProject_Use_Cache(mysqlpp_ep ${MYSQLPP_CACHE_PACKAGE_PATH} ${CMAKE_CURRENT_BINARY_DIR}) - endif () - endif () - else () - externalproject_add(mysqlpp_ep - URL - ${MYSQLPP_SOURCE_URL} - ${EP_LOG_OPTIONS} - CONFIGURE_COMMAND - "./configure" - ${MYSQLPP_CONFIGURE_ARGS} - BUILD_COMMAND - ${MAKE} ${MAKE_BUILD_ARGS} - BUILD_IN_SOURCE - 1 - BUILD_BYPRODUCTS - ${MYSQLPP_SHARED_LIB}) - endif () + externalproject_add(mysqlpp_ep + URL + ${MYSQLPP_SOURCE_URL} + ${EP_LOG_OPTIONS} + CONFIGURE_COMMAND + "./configure" + ${MYSQLPP_CONFIGURE_ARGS} + BUILD_COMMAND + ${MAKE} ${MAKE_BUILD_ARGS} + BUILD_IN_SOURCE + 1 + BUILD_BYPRODUCTS + ${MYSQLPP_SHARED_LIB}) file(MAKE_DIRECTORY "${MYSQLPP_INCLUDE_DIR}") add_library(mysqlpp SHARED IMPORTED) @@ -586,78 +487,29 @@ macro(build_prometheus) "-DCMAKE_INSTALL_PREFIX=${PROMETHEUS_PREFIX}" -DCMAKE_BUILD_TYPE=Release) - if (USE_JFROG_CACHE STREQUAL "ON") - execute_process(COMMAND sh -c "git ls-remote --heads --tags ${PROMETHEUS_SOURCE_URL} ${PROMETHEUS_VERSION} | cut -f 1" OUTPUT_VARIABLE PROMETHEUS_LAST_COMMIT_ID) - if (${PROMETHEUS_LAST_COMMIT_ID} MATCHES "^[^#][a-z0-9]+") - string(MD5 PROMETHEUS_COMBINE_MD5 "${PROMETHEUS_LAST_COMMIT_ID}") - set(PROMETHEUS_CACHE_PACKAGE_NAME "prometheus_${PROMETHEUS_COMBINE_MD5}.tar.gz") - set(PROMETHEUS_CACHE_URL "${JFROG_ARTFACTORY_CACHE_URL}/${PROMETHEUS_CACHE_PACKAGE_NAME}") - set(PROMETHEUS_CACHE_PACKAGE_PATH "${THIRDPARTY_PACKAGE_CACHE}/${PROMETHEUS_CACHE_PACKAGE_NAME}") - - execute_process(COMMAND wget -q --method HEAD ${PROMETHEUS_CACHE_URL} RESULT_VARIABLE return_code) - message(STATUS "Check the remote file ${PROMETHEUS_CACHE_URL}. return code = ${return_code}") - if (NOT return_code EQUAL 0) - externalproject_add(prometheus_ep - GIT_REPOSITORY - ${PROMETHEUS_SOURCE_URL} - GIT_TAG - ${PROMETHEUS_VERSION} - GIT_SHALLOW - TRUE - ${EP_LOG_OPTIONS} - CMAKE_ARGS - ${PROMETHEUS_CMAKE_ARGS} - BUILD_COMMAND - ${MAKE} - ${MAKE_BUILD_ARGS} - BUILD_IN_SOURCE - 1 - INSTALL_COMMAND - ${MAKE} - "DESTDIR=${PROMETHEUS_PREFIX}" - install - BUILD_BYPRODUCTS - "${PROMETHEUS_CORE_STATIC_LIB}" - "${PROMETHEUS_PUSH_STATIC_LIB}" - "${PROMETHEUS_PULL_STATIC_LIB}") - - ExternalProject_Create_Cache(prometheus_ep ${PROMETHEUS_CACHE_PACKAGE_PATH} "${CMAKE_CURRENT_BINARY_DIR}/prometheus_ep-prefix" ${JFROG_USER_NAME} ${JFROG_PASSWORD} ${PROMETHEUS_CACHE_URL}) - else () - file(DOWNLOAD ${PROMETHEUS_CACHE_URL} ${PROMETHEUS_CACHE_PACKAGE_PATH} STATUS status) - list(GET status 0 status_code) - message(STATUS "DOWNLOADING FROM ${PROMETHEUS_CACHE_URL} TO ${PROMETHEUS_CACHE_PACKAGE_PATH}. 
STATUS = ${status_code}") - if (status_code EQUAL 0) - ExternalProject_Use_Cache(prometheus_ep ${PROMETHEUS_CACHE_PACKAGE_PATH} ${CMAKE_CURRENT_BINARY_DIR}) - endif () - endif () - else () - message(FATAL_ERROR "The last commit ID of \"${PROMETHEUS_SOURCE_URL}\" repository don't match!") - endif () - else () - externalproject_add(prometheus_ep - GIT_REPOSITORY - ${PROMETHEUS_SOURCE_URL} - GIT_TAG - ${PROMETHEUS_VERSION} - GIT_SHALLOW - TRUE - ${EP_LOG_OPTIONS} - CMAKE_ARGS - ${PROMETHEUS_CMAKE_ARGS} - BUILD_COMMAND - ${MAKE} - ${MAKE_BUILD_ARGS} - BUILD_IN_SOURCE - 1 - INSTALL_COMMAND - ${MAKE} - "DESTDIR=${PROMETHEUS_PREFIX}" - install - BUILD_BYPRODUCTS - "${PROMETHEUS_CORE_STATIC_LIB}" - "${PROMETHEUS_PUSH_STATIC_LIB}" - "${PROMETHEUS_PULL_STATIC_LIB}") - endif () + externalproject_add(prometheus_ep + GIT_REPOSITORY + ${PROMETHEUS_SOURCE_URL} + GIT_TAG + ${PROMETHEUS_VERSION} + GIT_SHALLOW + TRUE + ${EP_LOG_OPTIONS} + CMAKE_ARGS + ${PROMETHEUS_CMAKE_ARGS} + BUILD_COMMAND + ${MAKE} + ${MAKE_BUILD_ARGS} + BUILD_IN_SOURCE + 1 + INSTALL_COMMAND + ${MAKE} + "DESTDIR=${PROMETHEUS_PREFIX}" + install + BUILD_BYPRODUCTS + "${PROMETHEUS_CORE_STATIC_LIB}" + "${PROMETHEUS_PUSH_STATIC_LIB}" + "${PROMETHEUS_PULL_STATIC_LIB}") file(MAKE_DIRECTORY "${PROMETHEUS_PREFIX}/push/include") add_library(prometheus-cpp-push STATIC IMPORTED) @@ -713,54 +565,20 @@ macro(build_sqlite) "CFLAGS=${EP_C_FLAGS}" "CXXFLAGS=${EP_CXX_FLAGS}") - if (USE_JFROG_CACHE STREQUAL "ON") - set(SQLITE_CACHE_PACKAGE_NAME "sqlite_${SQLITE_MD5}.tar.gz") - set(SQLITE_CACHE_URL "${JFROG_ARTFACTORY_CACHE_URL}/${SQLITE_CACHE_PACKAGE_NAME}") - set(SQLITE_CACHE_PACKAGE_PATH "${THIRDPARTY_PACKAGE_CACHE}/${SQLITE_CACHE_PACKAGE_NAME}") - - execute_process(COMMAND wget -q --method HEAD ${SQLITE_CACHE_URL} RESULT_VARIABLE return_code) - message(STATUS "Check the remote file ${SQLITE_CACHE_URL}. return code = ${return_code}") - if (NOT return_code EQUAL 0) - externalproject_add(sqlite_ep - URL - ${SQLITE_SOURCE_URL} - ${EP_LOG_OPTIONS} - CONFIGURE_COMMAND - "./configure" - ${SQLITE_CONFIGURE_ARGS} - BUILD_COMMAND - ${MAKE} - ${MAKE_BUILD_ARGS} - BUILD_IN_SOURCE - 1 - BUILD_BYPRODUCTS - "${SQLITE_STATIC_LIB}") - - ExternalProject_Create_Cache(sqlite_ep ${SQLITE_CACHE_PACKAGE_PATH} "${CMAKE_CURRENT_BINARY_DIR}/sqlite_ep-prefix" ${JFROG_USER_NAME} ${JFROG_PASSWORD} ${SQLITE_CACHE_URL}) - else () - file(DOWNLOAD ${SQLITE_CACHE_URL} ${SQLITE_CACHE_PACKAGE_PATH} STATUS status) - list(GET status 0 status_code) - message(STATUS "DOWNLOADING FROM ${SQLITE_CACHE_URL} TO ${SQLITE_CACHE_PACKAGE_PATH}. STATUS = ${status_code}") - if (status_code EQUAL 0) - ExternalProject_Use_Cache(sqlite_ep ${SQLITE_CACHE_PACKAGE_PATH} ${CMAKE_CURRENT_BINARY_DIR}) - endif () - endif () - else () - externalproject_add(sqlite_ep - URL - ${SQLITE_SOURCE_URL} - ${EP_LOG_OPTIONS} - CONFIGURE_COMMAND - "./configure" - ${SQLITE_CONFIGURE_ARGS} - BUILD_COMMAND - ${MAKE} - ${MAKE_BUILD_ARGS} - BUILD_IN_SOURCE - 1 - BUILD_BYPRODUCTS - "${SQLITE_STATIC_LIB}") - endif () + externalproject_add(sqlite_ep + URL + ${SQLITE_SOURCE_URL} + ${EP_LOG_OPTIONS} + CONFIGURE_COMMAND + "./configure" + ${SQLITE_CONFIGURE_ARGS} + BUILD_COMMAND + ${MAKE} + ${MAKE_BUILD_ARGS} + BUILD_IN_SOURCE + 1 + BUILD_BYPRODUCTS + "${SQLITE_STATIC_LIB}") file(MAKE_DIRECTORY "${SQLITE_INCLUDE_DIR}") add_library(sqlite STATIC IMPORTED) @@ -802,7 +620,7 @@ macro(build_sqlite_orm) list(GET status 1 status_string) if(status_code EQUAL 0) - message(STATUS "Downloading ... done") + message(STATUS "Downloading SQLITE_ORM ... 
done") set(IS_EXIST_FILE TRUE) break() else() @@ -851,48 +669,17 @@ macro(build_yamlcpp) -DYAML_CPP_BUILD_TESTS=OFF -DYAML_CPP_BUILD_TOOLS=OFF) - if (USE_JFROG_CACHE STREQUAL "ON") - set(YAMLCPP_CACHE_PACKAGE_NAME "yaml-cpp_${YAMLCPP_MD5}.tar.gz") - set(YAMLCPP_CACHE_URL "${JFROG_ARTFACTORY_CACHE_URL}/${YAMLCPP_CACHE_PACKAGE_NAME}") - set(YAMLCPP_CACHE_PACKAGE_PATH "${THIRDPARTY_PACKAGE_CACHE}/${YAMLCPP_CACHE_PACKAGE_NAME}") - - execute_process(COMMAND wget -q --method HEAD ${YAMLCPP_CACHE_URL} RESULT_VARIABLE return_code) - message(STATUS "Check the remote file ${YAMLCPP_CACHE_URL}. return code = ${return_code}") - if (NOT return_code EQUAL 0) - externalproject_add(yaml-cpp_ep - URL - ${YAMLCPP_SOURCE_URL} - ${EP_LOG_OPTIONS} - BUILD_COMMAND - ${MAKE} - ${MAKE_BUILD_ARGS} - BUILD_BYPRODUCTS - "${YAMLCPP_STATIC_LIB}" - CMAKE_ARGS - ${YAMLCPP_CMAKE_ARGS}) - - ExternalProject_Create_Cache(yaml-cpp_ep ${YAMLCPP_CACHE_PACKAGE_PATH} "${CMAKE_CURRENT_BINARY_DIR}/yaml-cpp_ep-prefix" ${JFROG_USER_NAME} ${JFROG_PASSWORD} ${YAMLCPP_CACHE_URL}) - else () - file(DOWNLOAD ${YAMLCPP_CACHE_URL} ${YAMLCPP_CACHE_PACKAGE_PATH} STATUS status) - list(GET status 0 status_code) - message(STATUS "DOWNLOADING FROM ${YAMLCPP_CACHE_URL} TO ${YAMLCPP_CACHE_PACKAGE_PATH}. STATUS = ${status_code}") - if (status_code EQUAL 0) - ExternalProject_Use_Cache(yaml-cpp_ep ${YAMLCPP_CACHE_PACKAGE_PATH} ${CMAKE_CURRENT_BINARY_DIR}) - endif () - endif () - else () - externalproject_add(yaml-cpp_ep - URL - ${YAMLCPP_SOURCE_URL} - ${EP_LOG_OPTIONS} - BUILD_COMMAND - ${MAKE} - ${MAKE_BUILD_ARGS} - BUILD_BYPRODUCTS - "${YAMLCPP_STATIC_LIB}" - CMAKE_ARGS - ${YAMLCPP_CMAKE_ARGS}) - endif () + externalproject_add(yaml-cpp_ep + URL + ${YAMLCPP_SOURCE_URL} + ${EP_LOG_OPTIONS} + BUILD_COMMAND + ${MAKE} + ${MAKE_BUILD_ARGS} + BUILD_BYPRODUCTS + "${YAMLCPP_STATIC_LIB}" + CMAKE_ARGS + ${YAMLCPP_CMAKE_ARGS}) file(MAKE_DIRECTORY "${YAMLCPP_INCLUDE_DIR}") add_library(yaml-cpp STATIC IMPORTED) @@ -921,56 +708,21 @@ macro(build_libunwind) set(LIBUNWIND_SHARED_LIB "${LIBUNWIND_PREFIX}/lib/libunwind${CMAKE_SHARED_LIBRARY_SUFFIX}") set(LIBUNWIND_CONFIGURE_ARGS "--prefix=${LIBUNWIND_PREFIX}") - if (USE_JFROG_CACHE STREQUAL "ON") - set(LIBUNWIND_CACHE_PACKAGE_NAME "libunwind_${LIBUNWIND_MD5}.tar.gz") - set(LIBUNWIND_CACHE_URL "${JFROG_ARTFACTORY_CACHE_URL}/${LIBUNWIND_CACHE_PACKAGE_NAME}") - set(LIBUNWIND_CACHE_PACKAGE_PATH "${THIRDPARTY_PACKAGE_CACHE}/${LIBUNWIND_CACHE_PACKAGE_NAME}") - - execute_process(COMMAND wget -q --method HEAD ${LIBUNWIND_CACHE_URL} RESULT_VARIABLE return_code) - message(STATUS "Check the remote file ${LIBUNWIND_CACHE_URL}. return code = ${return_code}") - if (NOT return_code EQUAL 0) - externalproject_add(libunwind_ep - URL - ${LIBUNWIND_SOURCE_URL} - ${EP_LOG_OPTIONS} - CONFIGURE_COMMAND - "./configure" - ${LIBUNWIND_CONFIGURE_ARGS} - BUILD_COMMAND - ${MAKE} ${MAKE_BUILD_ARGS} - BUILD_IN_SOURCE - 1 - INSTALL_COMMAND - ${MAKE} install - BUILD_BYPRODUCTS - ${LIBUNWIND_SHARED_LIB}) - - ExternalProject_Create_Cache(libunwind_ep ${LIBUNWIND_CACHE_PACKAGE_PATH} "${CMAKE_CURRENT_BINARY_DIR}/libunwind_ep-prefix" ${JFROG_USER_NAME} ${JFROG_PASSWORD} ${LIBUNWIND_CACHE_URL}) - else () - file(DOWNLOAD ${LIBUNWIND_CACHE_URL} ${LIBUNWIND_CACHE_PACKAGE_PATH} STATUS status) - list(GET status 0 status_code) - message(STATUS "DOWNLOADING FROM ${LIBUNWIND_CACHE_URL} TO ${LIBUNWIND_CACHE_PACKAGE_PATH}. 
STATUS = ${status_code}") - if (status_code EQUAL 0) - ExternalProject_Use_Cache(libunwind_ep ${LIBUNWIND_CACHE_PACKAGE_PATH} ${CMAKE_CURRENT_BINARY_DIR}) - endif () - endif () - else () - externalproject_add(libunwind_ep - URL - ${LIBUNWIND_SOURCE_URL} - ${EP_LOG_OPTIONS} - CONFIGURE_COMMAND - "./configure" - ${LIBUNWIND_CONFIGURE_ARGS} - BUILD_COMMAND - ${MAKE} ${MAKE_BUILD_ARGS} - BUILD_IN_SOURCE - 1 - INSTALL_COMMAND - ${MAKE} install - BUILD_BYPRODUCTS - ${LIBUNWIND_SHARED_LIB}) - endif () + externalproject_add(libunwind_ep + URL + ${LIBUNWIND_SOURCE_URL} + ${EP_LOG_OPTIONS} + CONFIGURE_COMMAND + "./configure" + ${LIBUNWIND_CONFIGURE_ARGS} + BUILD_COMMAND + ${MAKE} ${MAKE_BUILD_ARGS} + BUILD_IN_SOURCE + 1 + INSTALL_COMMAND + ${MAKE} install + BUILD_BYPRODUCTS + ${LIBUNWIND_SHARED_LIB}) file(MAKE_DIRECTORY "${LIBUNWIND_INCLUDE_DIR}") @@ -999,56 +751,21 @@ macro(build_gperftools) set(GPERFTOOLS_STATIC_LIB "${GPERFTOOLS_PREFIX}/lib/libprofiler${CMAKE_STATIC_LIBRARY_SUFFIX}") set(GPERFTOOLS_CONFIGURE_ARGS "--prefix=${GPERFTOOLS_PREFIX}") - if (USE_JFROG_CACHE STREQUAL "ON") - set(GPERFTOOLS_CACHE_PACKAGE_NAME "gperftools_${GPERFTOOLS_MD5}.tar.gz") - set(GPERFTOOLS_CACHE_URL "${JFROG_ARTFACTORY_CACHE_URL}/${GPERFTOOLS_CACHE_PACKAGE_NAME}") - set(GPERFTOOLS_CACHE_PACKAGE_PATH "${THIRDPARTY_PACKAGE_CACHE}/${GPERFTOOLS_CACHE_PACKAGE_NAME}") - - execute_process(COMMAND wget -q --method HEAD ${GPERFTOOLS_CACHE_URL} RESULT_VARIABLE return_code) - message(STATUS "Check the remote file ${GPERFTOOLS_CACHE_URL}. return code = ${return_code}") - if (NOT return_code EQUAL 0) - externalproject_add(gperftools_ep - URL - ${GPERFTOOLS_SOURCE_URL} - ${EP_LOG_OPTIONS} - CONFIGURE_COMMAND - "./configure" - ${GPERFTOOLS_CONFIGURE_ARGS} - BUILD_COMMAND - ${MAKE} ${MAKE_BUILD_ARGS} - BUILD_IN_SOURCE - 1 - INSTALL_COMMAND - ${MAKE} install - BUILD_BYPRODUCTS - ${GPERFTOOLS_STATIC_LIB}) - - ExternalProject_Create_Cache(gperftools_ep ${GPERFTOOLS_CACHE_PACKAGE_PATH} "${CMAKE_CURRENT_BINARY_DIR}/gperftools_ep-prefix" ${JFROG_USER_NAME} ${JFROG_PASSWORD} ${GPERFTOOLS_CACHE_URL}) - else () - file(DOWNLOAD ${GPERFTOOLS_CACHE_URL} ${GPERFTOOLS_CACHE_PACKAGE_PATH} STATUS status) - list(GET status 0 status_code) - message(STATUS "DOWNLOADING FROM ${GPERFTOOLS_CACHE_URL} TO ${GPERFTOOLS_CACHE_PACKAGE_PATH}. 
STATUS = ${status_code}") - if (status_code EQUAL 0) - ExternalProject_Use_Cache(gperftools_ep ${GPERFTOOLS_CACHE_PACKAGE_PATH} ${CMAKE_CURRENT_BINARY_DIR}) - endif () - endif () - else () - externalproject_add(gperftools_ep - URL - ${GPERFTOOLS_SOURCE_URL} - ${EP_LOG_OPTIONS} - CONFIGURE_COMMAND - "./configure" - ${GPERFTOOLS_CONFIGURE_ARGS} - BUILD_COMMAND - ${MAKE} ${MAKE_BUILD_ARGS} - BUILD_IN_SOURCE - 1 - INSTALL_COMMAND - ${MAKE} install - BUILD_BYPRODUCTS - ${GPERFTOOLS_STATIC_LIB}) - endif () + externalproject_add(gperftools_ep + URL + ${GPERFTOOLS_SOURCE_URL} + ${EP_LOG_OPTIONS} + CONFIGURE_COMMAND + "./configure" + ${GPERFTOOLS_CONFIGURE_ARGS} + BUILD_COMMAND + ${MAKE} ${MAKE_BUILD_ARGS} + BUILD_IN_SOURCE + 1 + INSTALL_COMMAND + ${MAKE} install + BUILD_BYPRODUCTS + ${GPERFTOOLS_STATIC_LIB}) ExternalProject_Add_StepDependencies(gperftools_ep build libunwind_ep) @@ -1086,67 +803,26 @@ macro(build_grpc) set(GRPC_PROTOBUF_STATIC_LIB "${GRPC_PROTOBUF_LIB_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}protobuf${CMAKE_STATIC_LIBRARY_SUFFIX}") set(GRPC_PROTOC_STATIC_LIB "${GRPC_PROTOBUF_LIB_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}protoc${CMAKE_STATIC_LIBRARY_SUFFIX}") - if (USE_JFROG_CACHE STREQUAL "ON") - set(GRPC_CACHE_PACKAGE_NAME "grpc_${GRPC_MD5}.tar.gz") - set(GRPC_CACHE_URL "${JFROG_ARTFACTORY_CACHE_URL}/${GRPC_CACHE_PACKAGE_NAME}") - set(GRPC_CACHE_PACKAGE_PATH "${THIRDPARTY_PACKAGE_CACHE}/${GRPC_CACHE_PACKAGE_NAME}") + externalproject_add(grpc_ep + URL + ${GRPC_SOURCE_URL} + ${EP_LOG_OPTIONS} + CONFIGURE_COMMAND + "" + BUILD_IN_SOURCE + 1 + BUILD_COMMAND + ${MAKE} ${MAKE_BUILD_ARGS} prefix=${GRPC_PREFIX} + INSTALL_COMMAND + ${MAKE} install prefix=${GRPC_PREFIX} + BUILD_BYPRODUCTS + ${GRPC_STATIC_LIB} + ${GRPC++_STATIC_LIB} + ${GRPCPP_CHANNELZ_STATIC_LIB} + ${GRPC_PROTOBUF_STATIC_LIB} + ${GRPC_PROTOC_STATIC_LIB}) - execute_process(COMMAND wget -q --method HEAD ${GRPC_CACHE_URL} RESULT_VARIABLE return_code) - message(STATUS "Check the remote file ${GRPC_CACHE_URL}. return code = ${return_code}") - if (NOT return_code EQUAL 0) - externalproject_add(grpc_ep - URL - ${GRPC_SOURCE_URL} - ${EP_LOG_OPTIONS} - CONFIGURE_COMMAND - "" - BUILD_IN_SOURCE - 1 - BUILD_COMMAND - ${MAKE} ${MAKE_BUILD_ARGS} prefix=${GRPC_PREFIX} - INSTALL_COMMAND - ${MAKE} install prefix=${GRPC_PREFIX} - BUILD_BYPRODUCTS - ${GRPC_STATIC_LIB} - ${GRPC++_STATIC_LIB} - ${GRPCPP_CHANNELZ_STATIC_LIB} - ${GRPC_PROTOBUF_STATIC_LIB} - ${GRPC_PROTOC_STATIC_LIB}) - - ExternalProject_Add_StepDependencies(grpc_ep build zlib_ep) - - ExternalProject_Create_Cache(grpc_ep ${GRPC_CACHE_PACKAGE_PATH} "${CMAKE_CURRENT_BINARY_DIR}/grpc_ep-prefix" ${JFROG_USER_NAME} ${JFROG_PASSWORD} ${GRPC_CACHE_URL}) - else () - file(DOWNLOAD ${GRPC_CACHE_URL} ${GRPC_CACHE_PACKAGE_PATH} STATUS status) - list(GET status 0 status_code) - message(STATUS "DOWNLOADING FROM ${GRPC_CACHE_URL} TO ${GRPC_CACHE_PACKAGE_PATH}. 
STATUS = ${status_code}") - if (status_code EQUAL 0) - ExternalProject_Use_Cache(grpc_ep ${GRPC_CACHE_PACKAGE_PATH} ${CMAKE_CURRENT_BINARY_DIR}) - endif () - endif () - else () - externalproject_add(grpc_ep - URL - ${GRPC_SOURCE_URL} - ${EP_LOG_OPTIONS} - CONFIGURE_COMMAND - "" - BUILD_IN_SOURCE - 1 - BUILD_COMMAND - ${MAKE} ${MAKE_BUILD_ARGS} prefix=${GRPC_PREFIX} - INSTALL_COMMAND - ${MAKE} install prefix=${GRPC_PREFIX} - BUILD_BYPRODUCTS - ${GRPC_STATIC_LIB} - ${GRPC++_STATIC_LIB} - ${GRPCPP_CHANNELZ_STATIC_LIB} - ${GRPC_PROTOBUF_STATIC_LIB} - ${GRPC_PROTOC_STATIC_LIB}) - - ExternalProject_Add_StepDependencies(grpc_ep build zlib_ep) - - endif () + ExternalProject_Add_StepDependencies(grpc_ep build zlib_ep) file(MAKE_DIRECTORY "${GRPC_INCLUDE_DIR}") @@ -1209,48 +885,17 @@ macro(build_zlib) set(ZLIB_CMAKE_ARGS ${EP_COMMON_CMAKE_ARGS} "-DCMAKE_INSTALL_PREFIX=${ZLIB_PREFIX}" -DBUILD_SHARED_LIBS=OFF) - if (USE_JFROG_CACHE STREQUAL "ON") - set(ZLIB_CACHE_PACKAGE_NAME "zlib_${ZLIB_MD5}.tar.gz") - set(ZLIB_CACHE_URL "${JFROG_ARTFACTORY_CACHE_URL}/${ZLIB_CACHE_PACKAGE_NAME}") - set(ZLIB_CACHE_PACKAGE_PATH "${THIRDPARTY_PACKAGE_CACHE}/${ZLIB_CACHE_PACKAGE_NAME}") - - execute_process(COMMAND wget -q --method HEAD ${ZLIB_CACHE_URL} RESULT_VARIABLE return_code) - message(STATUS "Check the remote file ${ZLIB_CACHE_URL}. return code = ${return_code}") - if (NOT return_code EQUAL 0) - externalproject_add(zlib_ep - URL - ${ZLIB_SOURCE_URL} - ${EP_LOG_OPTIONS} - BUILD_COMMAND - ${MAKE} - ${MAKE_BUILD_ARGS} - BUILD_BYPRODUCTS - "${ZLIB_STATIC_LIB}" - CMAKE_ARGS - ${ZLIB_CMAKE_ARGS}) - - ExternalProject_Create_Cache(zlib_ep ${ZLIB_CACHE_PACKAGE_PATH} "${CMAKE_CURRENT_BINARY_DIR}/zlib_ep-prefix" ${JFROG_USER_NAME} ${JFROG_PASSWORD} ${ZLIB_CACHE_URL}) - else () - file(DOWNLOAD ${ZLIB_CACHE_URL} ${ZLIB_CACHE_PACKAGE_PATH} STATUS status) - list(GET status 0 status_code) - message(STATUS "DOWNLOADING FROM ${ZLIB_CACHE_URL} TO ${ZLIB_CACHE_PACKAGE_PATH}. 
STATUS = ${status_code}") - if (status_code EQUAL 0) - ExternalProject_Use_Cache(zlib_ep ${ZLIB_CACHE_PACKAGE_PATH} ${CMAKE_CURRENT_BINARY_DIR}) - endif () - endif () - else () - externalproject_add(zlib_ep - URL - ${ZLIB_SOURCE_URL} - ${EP_LOG_OPTIONS} - BUILD_COMMAND - ${MAKE} - ${MAKE_BUILD_ARGS} - BUILD_BYPRODUCTS - "${ZLIB_STATIC_LIB}" - CMAKE_ARGS - ${ZLIB_CMAKE_ARGS}) - endif () + externalproject_add(zlib_ep + URL + ${ZLIB_SOURCE_URL} + ${EP_LOG_OPTIONS} + BUILD_COMMAND + ${MAKE} + ${MAKE_BUILD_ARGS} + BUILD_BYPRODUCTS + "${ZLIB_STATIC_LIB}" + CMAKE_ARGS + ${ZLIB_CMAKE_ARGS}) file(MAKE_DIRECTORY "${ZLIB_INCLUDE_DIR}") add_library(zlib STATIC IMPORTED) diff --git a/core/src/index/build.sh b/core/src/index/build.sh index 357ac5693a..8557e0e946 100644 --- a/core/src/index/build.sh +++ b/core/src/index/build.sh @@ -6,9 +6,8 @@ INSTALL_PREFIX=$(pwd)/cmake_build MAKE_CLEAN="OFF" PROFILING="OFF" FAISS_WITH_MKL="OFF" -USE_JFROG_CACHE="OFF" -while getopts "p:d:t:uhrcgmj" arg +while getopts "p:d:t:uhrcgm" arg do case $arg in t) @@ -33,9 +32,6 @@ do m) FAISS_WITH_MKL="ON" ;; - j) - USE_JFROG_CACHE="ON" - ;; h) # help echo " @@ -75,7 +71,6 @@ if [[ ${MAKE_CLEAN} == "ON" ]]; then -DCMAKE_CUDA_COMPILER=${CUDA_COMPILER} \ -DMILVUS_ENABLE_PROFILING=${PROFILING} \ -DFAISS_WITH_MKL=${FAISS_WITH_MKL} \ - -DUSE_JFROG_CACHE=${USE_JFROG_CACHE} \ ../" echo ${CMAKE_CMD} diff --git a/core/src/index/cmake/ThirdPartyPackagesCore.cmake b/core/src/index/cmake/ThirdPartyPackagesCore.cmake index ec0df49a48..b905aed355 100644 --- a/core/src/index/cmake/ThirdPartyPackagesCore.cmake +++ b/core/src/index/cmake/ThirdPartyPackagesCore.cmake @@ -115,20 +115,6 @@ endif (UNIX) # thirdparty directory set(THIRDPARTY_DIR "${INDEX_SOURCE_DIR}/thirdparty") -# ---------------------------------------------------------------------- -# JFrog -if (NOT DEFINED USE_JFROG_CACHE) - set(USE_JFROG_CACHE "OFF") -endif () -if (USE_JFROG_CACHE STREQUAL "ON") - set(JFROG_ARTFACTORY_CACHE_URL "${JFROG_ARTFACTORY_URL}/milvus/thirdparty/cache/${CMAKE_OS_NAME}/${KNOWHERE_BUILD_ARCH}/${BUILD_TYPE}") - set(THIRDPARTY_PACKAGE_CACHE "${THIRDPARTY_DIR}/cache") - if (NOT EXISTS ${THIRDPARTY_PACKAGE_CACHE}) - message(STATUS "Will create cached directory: ${THIRDPARTY_PACKAGE_CACHE}") - file(MAKE_DIRECTORY ${THIRDPARTY_PACKAGE_CACHE}) - endif () -endif () - # ---------------------------------------------------------------------- # ExternalProject options @@ -284,71 +270,25 @@ macro(build_arrow) -DBOOST_SOURCE=AUTO #try to find BOOST in the system default locations and build from source if not found ) - - if (USE_JFROG_CACHE STREQUAL "ON") - execute_process(COMMAND sh -c "git ls-remote --heads --tags ${ARROW_SOURCE_URL} ${ARROW_VERSION} | cut -f 1" OUTPUT_VARIABLE ARROW_LAST_COMMIT_ID) - if (${ARROW_LAST_COMMIT_ID} MATCHES "^[^#][a-z0-9]+") - string(MD5 ARROW_COMBINE_MD5 "${ARROW_LAST_COMMIT_ID}") - set(ARROW_CACHE_PACKAGE_NAME "arrow_${ARROW_COMBINE_MD5}.tar.gz") - set(ARROW_CACHE_URL "${JFROG_ARTFACTORY_CACHE_URL}/${ARROW_CACHE_PACKAGE_NAME}") - set(ARROW_CACHE_PACKAGE_PATH "${THIRDPARTY_PACKAGE_CACHE}/${ARROW_CACHE_PACKAGE_NAME}") - - execute_process(COMMAND wget -q --method HEAD ${ARROW_CACHE_URL} RESULT_VARIABLE return_code) - message(STATUS "Check the remote file ${ARROW_CACHE_URL}. 
return code = ${return_code}") - if (NOT return_code EQUAL 0) - externalproject_add(arrow_ep - GIT_REPOSITORY - ${ARROW_SOURCE_URL} - GIT_TAG - ${ARROW_VERSION} - GIT_SHALLOW - TRUE - SOURCE_SUBDIR - cpp - ${EP_LOG_OPTIONS} - CMAKE_ARGS - ${ARROW_CMAKE_ARGS} - BUILD_COMMAND - "" - INSTALL_COMMAND - ${MAKE} ${MAKE_BUILD_ARGS} install - BUILD_BYPRODUCTS - "${ARROW_STATIC_LIB}" - ) - - ExternalProject_Create_Cache(arrow_ep ${ARROW_CACHE_PACKAGE_PATH} "${INDEX_BINARY_DIR}/arrow_ep-prefix" ${JFROG_USER_NAME} ${JFROG_PASSWORD} ${ARROW_CACHE_URL}) - else () - file(DOWNLOAD ${ARROW_CACHE_URL} ${ARROW_CACHE_PACKAGE_PATH} STATUS status) - list(GET status 0 status_code) - message(STATUS "DOWNLOADING FROM ${ARROW_CACHE_URL} TO ${ARROW_CACHE_PACKAGE_PATH}. STATUS = ${status_code}") - if (status_code EQUAL 0) - ExternalProject_Use_Cache(arrow_ep ${ARROW_CACHE_PACKAGE_PATH} ${INDEX_BINARY_DIR}) - endif () - endif () - else () - message(FATAL_ERROR "The last commit ID of \"${ARROW_SOURCE_URL}\" repository don't match!") - endif () - else () - externalproject_add(arrow_ep - GIT_REPOSITORY - ${ARROW_SOURCE_URL} - GIT_TAG - ${ARROW_VERSION} - GIT_SHALLOW - TRUE - SOURCE_SUBDIR - cpp - ${EP_LOG_OPTIONS} - CMAKE_ARGS - ${ARROW_CMAKE_ARGS} - BUILD_COMMAND - "" - INSTALL_COMMAND - ${MAKE} ${MAKE_BUILD_ARGS} install - BUILD_BYPRODUCTS - "${ARROW_STATIC_LIB}" - ) - endif () + externalproject_add(arrow_ep + GIT_REPOSITORY + ${ARROW_SOURCE_URL} + GIT_TAG + ${ARROW_VERSION} + GIT_SHALLOW + TRUE + SOURCE_SUBDIR + cpp + ${EP_LOG_OPTIONS} + CMAKE_ARGS + ${ARROW_CMAKE_ARGS} + BUILD_COMMAND + "" + INSTALL_COMMAND + ${MAKE} ${MAKE_BUILD_ARGS} install + BUILD_BYPRODUCTS + "${ARROW_STATIC_LIB}" + ) file(MAKE_DIRECTORY "${ARROW_INCLUDE_DIR}") add_library(arrow STATIC IMPORTED) @@ -409,39 +349,6 @@ macro(build_gtest) "${GTEST_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}gmock${CMAKE_STATIC_LIBRARY_SUFFIX}" ) - - if (USE_JFROG_CACHE STREQUAL "ON") - set(GTEST_CACHE_PACKAGE_NAME "googletest_${GTEST_MD5}.tar.gz") - set(GTEST_CACHE_URL "${JFROG_ARTFACTORY_CACHE_URL}/${GTEST_CACHE_PACKAGE_NAME}") - set(GTEST_CACHE_PACKAGE_PATH "${THIRDPARTY_PACKAGE_CACHE}/${GTEST_CACHE_PACKAGE_NAME}") - - execute_process(COMMAND wget -q --method HEAD ${GTEST_CACHE_URL} RESULT_VARIABLE return_code) - message(STATUS "Check the remote file ${GTEST_CACHE_URL}. return code = ${return_code}") - if (NOT return_code EQUAL 0) - ExternalProject_Add(googletest_ep - URL - ${GTEST_SOURCE_URL} - BUILD_COMMAND - ${MAKE} - ${MAKE_BUILD_ARGS} - BUILD_BYPRODUCTS - ${GTEST_STATIC_LIB} - ${GTEST_MAIN_STATIC_LIB} - ${GMOCK_STATIC_LIB} - CMAKE_ARGS - ${GTEST_CMAKE_ARGS} - ${EP_LOG_OPTIONS}) - - ExternalProject_Create_Cache(googletest_ep ${GTEST_CACHE_PACKAGE_PATH} "${INDEX_BINARY_DIR}/googletest_ep-prefix" ${JFROG_USER_NAME} ${JFROG_PASSWORD} ${GTEST_CACHE_URL}) - else () - file(DOWNLOAD ${GTEST_CACHE_URL} ${GTEST_CACHE_PACKAGE_PATH} STATUS status) - list(GET status 0 status_code) - message(STATUS "DOWNLOADING FROM ${GTEST_CACHE_URL} TO ${GTEST_CACHE_PACKAGE_PATH}. STATUS = ${status_code}") - if (status_code EQUAL 0) - ExternalProject_Use_Cache(googletest_ep ${GTEST_CACHE_PACKAGE_PATH} ${INDEX_BINARY_DIR}) - endif () - endif () - else () ExternalProject_Add(googletest_ep URL ${GTEST_SOURCE_URL} @@ -455,7 +362,6 @@ macro(build_gtest) CMAKE_ARGS ${GTEST_CMAKE_ARGS} ${EP_LOG_OPTIONS}) - endif () # The include directory must exist before it is referenced by a target. 
file(MAKE_DIRECTORY "${GTEST_INCLUDE_DIR}") @@ -564,86 +470,40 @@ macro(build_faiss) set(FAISS_CONFIGURE_ARGS ${FAISS_CONFIGURE_ARGS} --without-cuda) endif () - if (USE_JFROG_CACHE STREQUAL "ON") - string(MD5 FAISS_COMBINE_MD5 "${FAISS_MD5}${LAPACK_MD5}${OPENBLAS_MD5}") - if (KNOWHERE_GPU_VERSION) - set(FAISS_COMPUTE_TYPE "gpu") - else () - set(FAISS_COMPUTE_TYPE "cpu") - endif () - if (FAISS_WITH_MKL) - set(FAISS_CACHE_PACKAGE_NAME "faiss_${FAISS_COMPUTE_TYPE}_mkl_${FAISS_COMBINE_MD5}.tar.gz") - else () - set(FAISS_CACHE_PACKAGE_NAME "faiss_${FAISS_COMPUTE_TYPE}_openblas_${FAISS_COMBINE_MD5}.tar.gz") - endif () - set(FAISS_CACHE_URL "${JFROG_ARTFACTORY_CACHE_URL}/${FAISS_CACHE_PACKAGE_NAME}") - set(FAISS_CACHE_PACKAGE_PATH "${THIRDPARTY_PACKAGE_CACHE}/${FAISS_CACHE_PACKAGE_NAME}") - - execute_process(COMMAND wget -q --method HEAD ${FAISS_CACHE_URL} RESULT_VARIABLE return_code) - message(STATUS "Check the remote file ${FAISS_CACHE_URL}. return code = ${return_code}") - if (NOT return_code EQUAL 0) - externalproject_add(faiss_ep - URL - ${FAISS_SOURCE_URL} - ${EP_LOG_OPTIONS} - CONFIGURE_COMMAND - "./configure" - ${FAISS_CONFIGURE_ARGS} - BUILD_COMMAND - ${MAKE} ${MAKE_BUILD_ARGS} all - BUILD_IN_SOURCE - 1 - INSTALL_COMMAND - ${MAKE} install - BUILD_BYPRODUCTS - ${FAISS_STATIC_LIB}) - - ExternalProject_Create_Cache(faiss_ep ${FAISS_CACHE_PACKAGE_PATH} "${INDEX_BINARY_DIR}/faiss_ep-prefix" ${JFROG_USER_NAME} ${JFROG_PASSWORD} ${FAISS_CACHE_URL}) - else () - file(DOWNLOAD ${FAISS_CACHE_URL} ${FAISS_CACHE_PACKAGE_PATH} STATUS status) - list(GET status 0 status_code) - message(STATUS "DOWNLOADING FROM ${FAISS_CACHE_URL} TO ${FAISS_CACHE_PACKAGE_PATH}. STATUS = ${status_code}") - if (status_code EQUAL 0) - ExternalProject_Use_Cache(faiss_ep ${FAISS_CACHE_PACKAGE_PATH} ${INDEX_BINARY_DIR}) - endif () - endif () + if (CUSTOMIZATION) + externalproject_add(faiss_ep + DOWNLOAD_COMMAND + "" + SOURCE_DIR + ${FAISS_SOURCE_DIR} + ${EP_LOG_OPTIONS} + CONFIGURE_COMMAND + "./configure" + ${FAISS_CONFIGURE_ARGS} + BUILD_COMMAND + ${MAKE} ${MAKE_BUILD_ARGS} all + BUILD_IN_SOURCE + 1 + INSTALL_COMMAND + ${MAKE} install + BUILD_BYPRODUCTS + ${FAISS_STATIC_LIB}) else () - if (CUSTOMIZATION) - externalproject_add(faiss_ep - DOWNLOAD_COMMAND - "" - SOURCE_DIR - ${FAISS_SOURCE_DIR} - ${EP_LOG_OPTIONS} - CONFIGURE_COMMAND - "./configure" - ${FAISS_CONFIGURE_ARGS} - BUILD_COMMAND - ${MAKE} ${MAKE_BUILD_ARGS} all - BUILD_IN_SOURCE - 1 - INSTALL_COMMAND - ${MAKE} install - BUILD_BYPRODUCTS - ${FAISS_STATIC_LIB}) - else () - externalproject_add(faiss_ep - URL - ${FAISS_SOURCE_URL} - ${EP_LOG_OPTIONS} - CONFIGURE_COMMAND - "./configure" - ${FAISS_CONFIGURE_ARGS} - BUILD_COMMAND - ${MAKE} ${MAKE_BUILD_ARGS} all - BUILD_IN_SOURCE - 1 - INSTALL_COMMAND - ${MAKE} install - BUILD_BYPRODUCTS - ${FAISS_STATIC_LIB}) - endif () - + externalproject_add(faiss_ep + URL + ${FAISS_SOURCE_URL} + ${EP_LOG_OPTIONS} + CONFIGURE_COMMAND + "./configure" + ${FAISS_CONFIGURE_ARGS} + BUILD_COMMAND + ${MAKE} ${MAKE_BUILD_ARGS} all + BUILD_IN_SOURCE + 1 + INSTALL_COMMAND + ${MAKE} install + BUILD_BYPRODUCTS + ${FAISS_STATIC_LIB}) endif () file(MAKE_DIRECTORY "${FAISS_INCLUDE_DIR}") diff --git a/core/src/index/thirdparty/faiss/ci/jenkins/Jenkinsfile b/core/src/index/thirdparty/faiss/ci/jenkins/Jenkinsfile deleted file mode 100644 index 347e53a693..0000000000 --- a/core/src/index/thirdparty/faiss/ci/jenkins/Jenkinsfile +++ /dev/null @@ -1,579 +0,0 @@ -pipeline { - agent none - - options { - timestamps() - } - - parameters{ - choice choices: 
['Release', 'Debug'], description: 'Build Type', name: 'BUILD_TYPE' - choice choices: ['False', 'True'], description: 'Whether it is origin Faiss', name: 'IS_ORIGIN_FAISS' - string defaultValue: 'registry.zilliz.com', description: 'DOCKER REGISTRY URL', name: 'DOKCER_REGISTRY_URL', trim: true - string defaultValue: 'a54e38ef-c424-4ea9-9224-b25fc20e3924', description: 'DOCKER CREDENTIALS ID', name: 'DOCKER_CREDENTIALS_ID', trim: true - string defaultValue: 'http://192.168.1.201/artifactory/milvus', description: 'JFROG ARTFACTORY URL', name: 'JFROG_ARTFACTORY_URL', trim: true - string defaultValue: '76fd48ab-2b8e-4eed-834d-2eefd23bb3a6', description: 'JFROG CREDENTIALS ID', name: 'JFROG_CREDENTIALS_ID', trim: true - } - - environment { - PROJECT_NAME = "milvus" - LOWER_BUILD_TYPE = params.BUILD_TYPE.toLowerCase() - SEMVER = "0.6.0" - JOBNAMES = env.JOB_NAME.split('/') - PIPELINE_NAME = "${JOBNAMES[0]}" - FAISS_ROOT_PATH="/usr/local/faiss" - NATIVE_FAISS_VERSION="1.6.0" - } - - stages { - stage("Ubuntu 18.04 x86_64") { - environment { - OS_NAME = "ubuntu18.04" - CPU_ARCH = "amd64" - } - - parallel { - stage("GPU Version") { - environment { - BINRARY_VERSION = "gpu" - PACKAGE_VERSION = VersionNumber([ - versionNumberString : '${SEMVER}-gpu-${OS_NAME}-${CPU_ARCH}-${LOWER_BUILD_TYPE}-${BUILD_DATE_FORMATTED, "yyyyMMdd"}-${BUILDS_TODAY}' - ]); - DOCKER_VERSION = "${SEMVER}-gpu-${OS_NAME}-${LOWER_BUILD_TYPE}" - } - - stages { - stage("Run Build") { - agent { - kubernetes { - label "${env.BINRARY_VERSION}-build" - defaultContainer 'jnlp' - yaml """ -apiVersion: v1 -kind: Pod -metadata: - name: milvus-gpu-build-env - labels: - app: milvus - componet: gpu-build-env -spec: - containers: - - name: milvus-gpu-build-env - image: registry.zilliz.com/milvus/milvus-gpu-build-env:v0.6.0-ubuntu18.04 - env: - - name: POD_IP - valueFrom: - fieldRef: - fieldPath: status.podIP - - name: BUILD_ENV_IMAGE_ID - value: "da9023b0f858f072672f86483a869aa87e90a5140864f89e5a012ec766d96dea" - command: - - cat - tty: true - resources: - limits: - memory: "24Gi" - cpu: "8.0" - nvidia.com/gpu: 1 - requests: - memory: "16Gi" - cpu: "4.0" - - name: milvus-mysql - image: mysql:5.6 - env: - - name: MYSQL_ROOT_PASSWORD - value: 123456 - ports: - - containerPort: 3306 - name: mysql - """ - } - } - - stages { - stage('Prepare') { - steps { - container("milvus-${env.BINRARY_VERSION}-build-env") { - script { - dir ("milvus") { - checkout([$class: 'GitSCM', branches: [[name: "${env.SEMVER}"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus.git", name: 'origin', refspec: "+refs/heads/${env.SEMVER}:refs/remotes/origin/${env.SEMVER}"]]]) - } - } - } - } - } - - stage('Build') { - steps { - container("milvus-${env.BINRARY_VERSION}-build-env") { - script { - load "${env.WORKSPACE}/ci/jenkins/step/build.groovy" - } - } - } - } - stage('Code Coverage') { - steps { - container("milvus-${env.BINRARY_VERSION}-build-env") { - script { - load "${env.WORKSPACE}/ci/jenkins/step/coverage.groovy" - } - } - } - } - stage('Upload Package') { - steps { - container("milvus-${env.BINRARY_VERSION}-build-env") { - script { - dir ("milvus") { - load "ci/jenkins/step/package.groovy" - } - } - } - } - } - } - } - - stage("Publish docker images") { - agent { - kubernetes { - label "${BINRARY_VERSION}-publish" - defaultContainer 'jnlp' - yaml """ -apiVersion: v1 -kind: Pod -metadata: - labels: - app: publish - componet: docker -spec: - containers: - - name: publish-images - image: registry.zilliz.com/library/docker:v1.0.0 - securityContext: - 
privileged: true - command: - - cat - tty: true - volumeMounts: - - name: docker-sock - mountPath: /var/run/docker.sock - volumes: - - name: docker-sock - hostPath: - path: /var/run/docker.sock -""" - } - } - - stages { - stage('Prepare') { - steps { - container('publish-images') { - script { - dir ("milvus") { - checkout([$class: 'GitSCM', branches: [[name: "${env.SEMVER}"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus.git", name: 'origin', refspec: "+refs/heads/${env.SEMVER}:refs/remotes/origin/${env.SEMVER}"]]]) - } - } - } - } - } - - stage('Publish') { - steps { - container('publish-images') { - script { - dir ("milvus") { - load "ci/jenkins/step/publishImages.groovy" - } - } - } - } - } - } - } - - stage("Deploy to Development") { - environment { - FROMAT_SEMVER = "${env.SEMVER}".replaceAll("\\.", "-") - HELM_RELEASE_NAME = "${env.PIPELINE_NAME}-${env.FROMAT_SEMVER}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}".toLowerCase() - } - - agent { - kubernetes { - label "${env.BINRARY_VERSION}-dev-test" - defaultContainer 'jnlp' - yaml """ -apiVersion: v1 -kind: Pod -metadata: - labels: - app: milvus - componet: test-env -spec: - containers: - - name: milvus-test-env - image: registry.zilliz.com/milvus/milvus-test-env:v0.1 - command: - - cat - tty: true - volumeMounts: - - name: kubeconf - mountPath: /root/.kube/ - readOnly: true - volumes: - - name: kubeconf - secret: - secretName: test-cluster-config -""" - } - } - - stages { - stage('Prepare') { - steps { - container('milvus-test-env') { - script { - dir ("milvus") { - checkout([$class: 'GitSCM', branches: [[name: "${env.SEMVER}"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus.git", name: 'origin', refspec: "+refs/heads/${env.SEMVER}:refs/remotes/origin/${env.SEMVER}"]]]) - } - } - } - } - } - - stage("Deploy to Dev") { - steps { - container('milvus-test-env') { - script { - dir ("milvus") { - load "ci/jenkins/step/deploySingle2Dev.groovy" - } - } - } - } - } - - stage("Dev Test") { - steps { - container('milvus-test-env') { - script { - boolean isNightlyTest = isTimeTriggeredBuild() - if (isNightlyTest) { - dir ("milvus") { - load "ci/jenkins/step/singleDevNightlyTest.groovy" - } - } else { - dir ("milvus") { - load "ci/jenkins/step/singleDevTest.groovy" - } - } - } - } - } - } - - stage ("Cleanup Dev") { - steps { - container('milvus-test-env') { - script { - dir ("milvus") { - load "ci/jenkins/step/cleanupSingleDev.groovy" - } - } - } - } - } - } - post { - unsuccessful { - container('milvus-test-env') { - script { - dir ("milvus") { - load "ci/jenkins/step/cleanupSingleDev.groovy" - } - } - } - } - } - } - } - } - - stage("CPU Version") { - environment { - BINRARY_VERSION = "cpu" - PACKAGE_VERSION = VersionNumber([ - versionNumberString : '${SEMVER}-cpu-${OS_NAME}-${CPU_ARCH}-${LOWER_BUILD_TYPE}-${BUILD_DATE_FORMATTED, "yyyyMMdd"}-${BUILDS_TODAY}' - ]); - DOCKER_VERSION = "${SEMVER}-cpu-${OS_NAME}-${LOWER_BUILD_TYPE}" - } - - stages { - stage("Run Build") { - agent { - kubernetes { - label "${env.BINRARY_VERSION}-build" - defaultContainer 'jnlp' - yaml """ -apiVersion: v1 -kind: Pod -metadata: - name: milvus-cpu-build-env - labels: - app: milvus - componet: cpu-build-env -spec: - containers: - - name: milvus-cpu-build-env - image: registry.zilliz.com/milvus/milvus-cpu-build-env:v0.6.0-ubuntu18.04 - env: - - name: POD_IP - valueFrom: - fieldRef: - fieldPath: status.podIP - - name: BUILD_ENV_IMAGE_ID - value: "23476391bec80c64f10d44a6370c73c71f011a6b95114b10ff82a60e771e11c7" - command: - - cat 
- tty: true - resources: - limits: - memory: "24Gi" - cpu: "8.0" - requests: - memory: "16Gi" - cpu: "4.0" - - name: milvus-mysql - image: mysql:5.6 - env: - - name: MYSQL_ROOT_PASSWORD - value: 123456 - ports: - - containerPort: 3306 - name: mysql - """ - } - } - - stages { - stage('Prepare') { - steps { - container("milvus-${env.BINRARY_VERSION}-build-env") { - script { - dir ("milvus") { - checkout([$class: 'GitSCM', branches: [[name: "${env.SEMVER}"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus.git", name: 'origin', refspec: "+refs/heads/${env.SEMVER}:refs/remotes/origin/${env.SEMVER}"]]]) - } - } - } - } - } - - stage('Build') { - steps { - container("milvus-${env.BINRARY_VERSION}-build-env") { - script { - load "${env.WORKSPACE}/ci/jenkins/step/build.groovy" - } - } - } - } - stage('Code Coverage') { - steps { - container("milvus-${env.BINRARY_VERSION}-build-env") { - script { - load "${env.WORKSPACE}/ci/jenkins/step/coverage.groovy" - } - } - } - } - stage('Upload Package') { - steps { - container("milvus-${env.BINRARY_VERSION}-build-env") { - script { - dir ("milvus") { - load "ci/jenkins/step/package.groovy" - } - } - } - } - } - } - } - - stage("Publish docker images") { - agent { - kubernetes { - label "${BINRARY_VERSION}-publish" - defaultContainer 'jnlp' - yaml """ -apiVersion: v1 -kind: Pod -metadata: - labels: - app: publish - componet: docker -spec: - containers: - - name: publish-images - image: registry.zilliz.com/library/docker:v1.0.0 - securityContext: - privileged: true - command: - - cat - tty: true - volumeMounts: - - name: docker-sock - mountPath: /var/run/docker.sock - volumes: - - name: docker-sock - hostPath: - path: /var/run/docker.sock -""" - } - } - - stages { - stage('Prepare') { - steps { - container("publish-images") { - script { - dir ("milvus") { - checkout([$class: 'GitSCM', branches: [[name: "${env.SEMVER}"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus.git", name: 'origin', refspec: "+refs/heads/${env.SEMVER}:refs/remotes/origin/${env.SEMVER}"]]]) - } - } - } - } - } - - stage('Publish') { - steps { - container('publish-images'){ - script { - dir ("milvus") { - load "ci/jenkins/step/publishImages.groovy" - } - } - } - } - } - } - } - - stage("Deploy to Development") { - environment { - FROMAT_SEMVER = "${env.SEMVER}".replaceAll("\\.", "-") - HELM_RELEASE_NAME = "${env.PIPELINE_NAME}-${env.FROMAT_SEMVER}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}".toLowerCase() - } - - agent { - kubernetes { - label "${env.BINRARY_VERSION}-dev-test" - defaultContainer 'jnlp' - yaml """ -apiVersion: v1 -kind: Pod -metadata: - labels: - app: milvus - componet: test-env -spec: - containers: - - name: milvus-test-env - image: registry.zilliz.com/milvus/milvus-test-env:v0.1 - command: - - cat - tty: true - volumeMounts: - - name: kubeconf - mountPath: /root/.kube/ - readOnly: true - volumes: - - name: kubeconf - secret: - secretName: test-cluster-config -""" - } - } - - stages { - stage('Prepare') { - steps { - container('milvus-test-env') { - script { - dir ("milvus") { - checkout([$class: 'GitSCM', branches: [[name: "${env.SEMVER}"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus.git", name: 'origin', refspec: "+refs/heads/${env.SEMVER}:refs/remotes/origin/${env.SEMVER}"]]]) - } - } - } - } - } - - stage("Deploy to Dev") { - steps { - container('milvus-test-env') { - script { - dir ("milvus") { - load "ci/jenkins/step/deploySingle2Dev.groovy" - } - } - } - } - } - - stage("Dev Test") { - steps { - 
container('milvus-test-env') { - script { - boolean isNightlyTest = isTimeTriggeredBuild() - if (isNightlyTest) { - dir ("milvus") { - load "ci/jenkins/step/singleDevNightlyTest.groovy" - } - } else { - dir ("milvus") { - load "ci/jenkins/step/singleDevTest.groovy" - } - } - } - } - } - } - - stage ("Cleanup Dev") { - steps { - container('milvus-test-env') { - script { - dir ("milvus") { - load "ci/jenkins/step/cleanupSingleDev.groovy" - } - } - } - } - } - } - post { - unsuccessful { - container('milvus-test-env') { - script { - dir ("milvus") { - load "ci/jenkins/step/cleanupSingleDev.groovy" - } - } - } - } - } - } - } - } - } - } - } -} - -boolean isTimeTriggeredBuild() { - if (currentBuild.getBuildCauses('hudson.triggers.TimerTrigger$TimerTriggerCause').size() != 0) { - return true - } - return false -} diff --git a/core/src/index/thirdparty/faiss/ci/jenkins/step/build.groovy b/core/src/index/thirdparty/faiss/ci/jenkins/step/build.groovy deleted file mode 100644 index 6013aea5ea..0000000000 --- a/core/src/index/thirdparty/faiss/ci/jenkins/step/build.groovy +++ /dev/null @@ -1,35 +0,0 @@ -timeout(time: 60, unit: 'MINUTES') { - dir ("ci/scripts") { - if ("${env.BINRARY_VERSION}" == "gpu") { - if ("${params.IS_ORIGIN_FAISS}" == "False") { - sh "./build.sh -o ${env.FAISS_ROOT_PATH} -i -g" - } else { - sh "wget https://github.com/facebookresearch/faiss/archive/v${env.NATIVE_FAISS_VERSION}.tar.gz && \ - tar zxvf v${env.NATIVE_FAISS_VERSION}.tar.gz" - sh "./build.sh -o ${env.FAISS_ROOT_PATH} -s ./faiss-${env.NATIVE_FAISS_VERSION} -i -g" - } - } else { - sh "wget https://github.com/facebookresearch/faiss/archive/v${env.NATIVE_FAISS_VERSION}.tar.gz && \ - tar zxvf v${env.NATIVE_FAISS_VERSION}.tar.gz" - sh "./build.sh -o ${env.FAISS_ROOT_PATH} -s ./faiss-${env.NATIVE_FAISS_VERSION} -i" - } - } - - dir ("milvus") { - dir ("ci/scripts") { - withCredentials([usernamePassword(credentialsId: "${params.JFROG_CREDENTIALS_ID}", usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD')]) { - def checkResult = sh(script: "./check_ccache.sh -l ${params.JFROG_ARTFACTORY_URL}/ccache", returnStatus: true) - if ("${env.BINRARY_VERSION}" == "gpu") { - if ("${params.IS_ORIGIN_FAISS}" == "False") { - sh ". ./before-install.sh && ./build.sh -t ${params.BUILD_TYPE} -o /opt/milvus -f ${env.FAISS_ROOT_PATH} -l -m -g -x -u -c" - } else { - sh ". ./before-install.sh && ./build.sh -t ${params.BUILD_TYPE} -o /opt/milvus -f ${env.FAISS_ROOT_PATH} -l -m -g -u -c" - } - } else { - sh ". 
./before-install.sh && ./build.sh -t ${params.BUILD_TYPE} -o /opt/milvus -f ${env.FAISS_ROOT_PATH} -l -m -u -c" - } - sh "./update_ccache.sh -l ${params.JFROG_ARTFACTORY_URL}/ccache -u ${USERNAME} -p ${PASSWORD}" - } - } - } -} diff --git a/core/src/index/thirdparty/faiss/ci/jenkins/step/coverage.groovy b/core/src/index/thirdparty/faiss/ci/jenkins/step/coverage.groovy deleted file mode 100644 index 40d2d2f03a..0000000000 --- a/core/src/index/thirdparty/faiss/ci/jenkins/step/coverage.groovy +++ /dev/null @@ -1,5 +0,0 @@ -timeout(time: 30, unit: 'MINUTES') { - dir ("milvus/ci/scripts") { - sh "./coverage.sh -o /opt/milvus -u root -p 123456 -t \$POD_IP" - } -} \ No newline at end of file diff --git a/core/src/index/thirdparty/faiss/ci/scripts/build.sh b/core/src/index/thirdparty/faiss/ci/scripts/build.sh deleted file mode 100755 index f3bb2dcaca..0000000000 --- a/core/src/index/thirdparty/faiss/ci/scripts/build.sh +++ /dev/null @@ -1,127 +0,0 @@ -#!/bin/bash - -set -e - -SOURCE="${BASH_SOURCE[0]}" -while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink - DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" - SOURCE="$(readlink "$SOURCE")" - [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located -done -SCRIPTS_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" - -FAISS_SOURCE_DIR="${SCRIPTS_DIR}/../.." - -FAISS_WITH_MKL="False" -FAISS_GPU_VERSION="False" -FAISS_COMMON_CONFIGURE_ARGS="CXXFLAGS=\"-mavx2 -mf16c\" --without-python" -FAISS_CONFIGURE_ARGS="${FAISS_COMMON_CONFIGURE_ARGS}" -CUDA_TOOLKIT_ROOT_DIR="/usr/local/cuda" -FAISS_CUDA_ARCH="-gencode=arch=compute_60,code=sm_60 -gencode=arch=compute_61,code=sm_61 -gencode=arch=compute_70,code=sm_70 -gencode=arch=compute_75,code=sm_75" -MKL_ROOT_DIR="/opt/intel/compilers_and_libraries_2019.5.281/linux/mkl" - -while getopts "o:s:m:b:l:c:a:igh" arg -do - case $arg in - o) - FAISS_INSTALL_PREFIX=$OPTARG - ;; - s) - FAISS_SOURCE_DIR=$OPTARG - ;; - m) - MKL_ROOT_DIR=$OPTARG - ;; - b) - OPENBLAS_PREFIX=$OPTARG - ;; - l) - LAPACK_PREFIX=$OPTARG - ;; - c) - CUDA_TOOLKIT_ROOT_DIR=$OPTARG - ;; - a) - FAISS_CUDA_ARCH=$OPTARG - ;; - i) - FAISS_WITH_MKL="True" - ;; - g) - FAISS_GPU_VERSION="True" - ;; - h) # help - echo " - -parameter: --o: faiss install prefix path --s: faiss source directory --m: mkl root directory --b: openblas install prefix path --l: lapack install prefix path --c: CUDA toolkit root directory --a: faiss CUDA compute architecture --i: faiss with mkl --g: faiss gpu version --h: help - -usage: -./build.sh -o \${FAISS_INSTALL_PREFIX} -s \${FAISS_SOURCE_DIR} -m \${MKL_ROOT_DIR} -b \${OPENBLAS_PREFIX} -l \${LAPACK_PREFIX} -c \${CUDA_TOOLKIT_ROOT_DIR} -a \${FAISS_CUDA_ARCH} [-i] [-g] [-h] - " - exit 0 - ;; - ?) - echo "ERROR! unknown argument" - exit 1 - ;; - esac -done - -if [[ -n "${FAISS_INSTALL_PREFIX}" ]];then - FAISS_CONFIGURE_ARGS="${FAISS_CONFIGURE_ARGS} --prefix=${FAISS_INSTALL_PREFIX}" -fi - -if [[ "${FAISS_GPU_VERSION}" == "True" ]];then - if [[ ! -n "${FAISS_CUDA_ARCH}" ]];then - echo "FAISS_CUDA_ARCH: \"${FAISS_CUDA_ARCH}\" is empty!" - exit 1 - fi - if [[ ! -d "${CUDA_TOOLKIT_ROOT_DIR}" ]];then - echo "CUDA_TOOLKIT_ROOT_DIR: \"${CUDA_TOOLKIT_ROOT_DIR}\" directory doesn't exist!" 
-        exit 1
-    fi
-    FAISS_CONFIGURE_ARGS="${FAISS_CONFIGURE_ARGS} --with-cuda=${CUDA_TOOLKIT_ROOT_DIR} --with-cuda-arch='${FAISS_CUDA_ARCH}'"
-else
-    FAISS_CONFIGURE_ARGS="${FAISS_CONFIGURE_ARGS} --without-cuda"
-fi
-
-if [[ "${FAISS_WITH_MKL}" == "True" ]];then
-    if [[ ! -d "${MKL_ROOT_DIR}" ]];then
-        echo "MKL_ROOT_DIR: \"${MKL_ROOT_DIR}\" directory doesn't exist!"
-        exit 1
-    fi
-    FAISS_CONFIGURE_ARGS="${FAISS_CONFIGURE_ARGS} CPPFLAGS='-DFINTEGER=long -DMKL_ILP64 -m64 -I${MKL_ROOT_DIR}/include' LDFLAGS='-L${MKL_ROOT_DIR}/lib/intel64'"
-else
-    if [[ -n "${LAPACK_PREFIX}" ]];then
-        if [[ ! -d "${LAPACK_PREFIX}" ]];then
-            echo "LAPACK_PREFIX: \"${LAPACK_PREFIX}\" directory doesn't exist!"
-            exit 1
-        fi
-        FAISS_CONFIGURE_ARGS="${FAISS_CONFIGURE_ARGS} LDFLAGS=-L${LAPACK_PREFIX}/lib"
-    fi
-    if [[ -n "${OPENBLAS_PREFIX}" ]];then
-        if [[ ! -d "${OPENBLAS_PREFIX}" ]];then
-            echo "OPENBLAS_PREFIX: \"${OPENBLAS_PREFIX}\" directory doesn't exist!"
-            exit 1
-        fi
-        FAISS_CONFIGURE_ARGS="${FAISS_CONFIGURE_ARGS} LDFLAGS=-L${OPENBLAS_PREFIX}/lib"
-    fi
-fi
-
-cd ${FAISS_SOURCE_DIR}
-
-sh -c "./configure ${FAISS_CONFIGURE_ARGS}"
-
-# compile and build
-make -j8 || exit 1
-make install || exit 1
diff --git a/sdk/cmake/ThirdPartyPackages.cmake b/sdk/cmake/ThirdPartyPackages.cmake
index 4a82dda2cb..a2cacdd842 100644
--- a/sdk/cmake/ThirdPartyPackages.cmake
+++ b/sdk/cmake/ThirdPartyPackages.cmake
@@ -215,67 +215,26 @@ macro(build_grpc)
     set(GRPC_PROTOBUF_STATIC_LIB "${GRPC_PROTOBUF_LIB_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}protobuf${CMAKE_STATIC_LIBRARY_SUFFIX}")
     set(GRPC_PROTOC_STATIC_LIB "${GRPC_PROTOBUF_LIB_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}protoc${CMAKE_STATIC_LIBRARY_SUFFIX}")
 
-    if (USE_JFROG_CACHE STREQUAL "ON")
-        set(GRPC_CACHE_PACKAGE_NAME "grpc_${GRPC_MD5}.tar.gz")
-        set(GRPC_CACHE_URL "${JFROG_ARTFACTORY_CACHE_URL}/${GRPC_CACHE_PACKAGE_NAME}")
-        set(GRPC_CACHE_PACKAGE_PATH "${THIRDPARTY_PACKAGE_CACHE}/${GRPC_CACHE_PACKAGE_NAME}")
+    externalproject_add(grpc_ep
+            URL
+            ${GRPC_SOURCE_URL}
+            ${EP_LOG_OPTIONS}
+            CONFIGURE_COMMAND
+            ""
+            BUILD_IN_SOURCE
+            1
+            BUILD_COMMAND
+            ${MAKE} ${MAKE_BUILD_ARGS} prefix=${GRPC_PREFIX}
+            INSTALL_COMMAND
+            ${MAKE} install prefix=${GRPC_PREFIX}
+            BUILD_BYPRODUCTS
+            ${GRPC_STATIC_LIB}
+            ${GRPC++_STATIC_LIB}
+            ${GRPCPP_CHANNELZ_STATIC_LIB}
+            ${GRPC_PROTOBUF_STATIC_LIB}
+            ${GRPC_PROTOC_STATIC_LIB})
-        execute_process(COMMAND wget -q --method HEAD ${GRPC_CACHE_URL} RESULT_VARIABLE return_code)
-        message(STATUS "Check the remote file ${GRPC_CACHE_URL}. return code = ${return_code}")
-        if (NOT return_code EQUAL 0)
-            externalproject_add(grpc_ep
-                    URL
-                    ${GRPC_SOURCE_URL}
-                    ${EP_LOG_OPTIONS}
-                    CONFIGURE_COMMAND
-                    ""
-                    BUILD_IN_SOURCE
-                    1
-                    BUILD_COMMAND
-                    ${MAKE} ${MAKE_BUILD_ARGS} prefix=${GRPC_PREFIX}
-                    INSTALL_COMMAND
-                    ${MAKE} install prefix=${GRPC_PREFIX}
-                    BUILD_BYPRODUCTS
-                    ${GRPC_STATIC_LIB}
-                    ${GRPC++_STATIC_LIB}
-                    ${GRPCPP_CHANNELZ_STATIC_LIB}
-                    ${GRPC_PROTOBUF_STATIC_LIB}
-                    ${GRPC_PROTOC_STATIC_LIB})
-
-            ExternalProject_Add_StepDependencies(grpc_ep build zlib_ep)
-
-            ExternalProject_Create_Cache(grpc_ep ${GRPC_CACHE_PACKAGE_PATH} "${CMAKE_CURRENT_BINARY_DIR}/grpc_ep-prefix" ${JFROG_USER_NAME} ${JFROG_PASSWORD} ${GRPC_CACHE_URL})
-        else ()
-            file(DOWNLOAD ${GRPC_CACHE_URL} ${GRPC_CACHE_PACKAGE_PATH} STATUS status)
-            list(GET status 0 status_code)
-            message(STATUS "DOWNLOADING FROM ${GRPC_CACHE_URL} TO ${GRPC_CACHE_PACKAGE_PATH}. STATUS = ${status_code}")
-            if (status_code EQUAL 0)
-                ExternalProject_Use_Cache(grpc_ep ${GRPC_CACHE_PACKAGE_PATH} ${CMAKE_CURRENT_BINARY_DIR})
-            endif ()
-        endif ()
-    else ()
-        externalproject_add(grpc_ep
-                URL
-                ${GRPC_SOURCE_URL}
-                ${EP_LOG_OPTIONS}
-                CONFIGURE_COMMAND
-                ""
-                BUILD_IN_SOURCE
-                1
-                BUILD_COMMAND
-                ${MAKE} ${MAKE_BUILD_ARGS} prefix=${GRPC_PREFIX}
-                INSTALL_COMMAND
-                ${MAKE} install prefix=${GRPC_PREFIX}
-                BUILD_BYPRODUCTS
-                ${GRPC_STATIC_LIB}
-                ${GRPC++_STATIC_LIB}
-                ${GRPCPP_CHANNELZ_STATIC_LIB}
-                ${GRPC_PROTOBUF_STATIC_LIB}
-                ${GRPC_PROTOC_STATIC_LIB})
-
-        ExternalProject_Add_StepDependencies(grpc_ep build zlib_ep)
-
-    endif ()
+    ExternalProject_Add_StepDependencies(grpc_ep build zlib_ep)
 
     file(MAKE_DIRECTORY "${GRPC_INCLUDE_DIR}")
 
@@ -338,48 +297,17 @@ macro(build_zlib)
     set(ZLIB_CMAKE_ARGS ${EP_COMMON_CMAKE_ARGS} "-DCMAKE_INSTALL_PREFIX=${ZLIB_PREFIX}" -DBUILD_SHARED_LIBS=OFF)
 
-    if (USE_JFROG_CACHE STREQUAL "ON")
-        set(ZLIB_CACHE_PACKAGE_NAME "zlib_${ZLIB_MD5}.tar.gz")
-        set(ZLIB_CACHE_URL "${JFROG_ARTFACTORY_CACHE_URL}/${ZLIB_CACHE_PACKAGE_NAME}")
-        set(ZLIB_CACHE_PACKAGE_PATH "${THIRDPARTY_PACKAGE_CACHE}/${ZLIB_CACHE_PACKAGE_NAME}")
-
-        execute_process(COMMAND wget -q --method HEAD ${ZLIB_CACHE_URL} RESULT_VARIABLE return_code)
-        message(STATUS "Check the remote file ${ZLIB_CACHE_URL}. return code = ${return_code}")
-        if (NOT return_code EQUAL 0)
-            externalproject_add(zlib_ep
-                    URL
-                    ${ZLIB_SOURCE_URL}
-                    ${EP_LOG_OPTIONS}
-                    BUILD_COMMAND
-                    ${MAKE}
-                    ${MAKE_BUILD_ARGS}
-                    BUILD_BYPRODUCTS
-                    "${ZLIB_STATIC_LIB}"
-                    CMAKE_ARGS
-                    ${ZLIB_CMAKE_ARGS})
-
-            ExternalProject_Create_Cache(zlib_ep ${ZLIB_CACHE_PACKAGE_PATH} "${CMAKE_CURRENT_BINARY_DIR}/zlib_ep-prefix" ${JFROG_USER_NAME} ${JFROG_PASSWORD} ${ZLIB_CACHE_URL})
-        else ()
-            file(DOWNLOAD ${ZLIB_CACHE_URL} ${ZLIB_CACHE_PACKAGE_PATH} STATUS status)
-            list(GET status 0 status_code)
-            message(STATUS "DOWNLOADING FROM ${ZLIB_CACHE_URL} TO ${ZLIB_CACHE_PACKAGE_PATH}. STATUS = ${status_code}")
-            if (status_code EQUAL 0)
-                ExternalProject_Use_Cache(zlib_ep ${ZLIB_CACHE_PACKAGE_PATH} ${CMAKE_CURRENT_BINARY_DIR})
-            endif ()
-        endif ()
-    else ()
-        externalproject_add(zlib_ep
-                URL
-                ${ZLIB_SOURCE_URL}
-                ${EP_LOG_OPTIONS}
-                BUILD_COMMAND
-                ${MAKE}
-                ${MAKE_BUILD_ARGS}
-                BUILD_BYPRODUCTS
-                "${ZLIB_STATIC_LIB}"
-                CMAKE_ARGS
-                ${ZLIB_CMAKE_ARGS})
-    endif ()
+    externalproject_add(zlib_ep
+            URL
+            ${ZLIB_SOURCE_URL}
+            ${EP_LOG_OPTIONS}
+            BUILD_COMMAND
+            ${MAKE}
+            ${MAKE_BUILD_ARGS}
+            BUILD_BYPRODUCTS
+            "${ZLIB_STATIC_LIB}"
+            CMAKE_ARGS
+            ${ZLIB_CMAKE_ARGS})
 
     file(MAKE_DIRECTORY "${ZLIB_INCLUDE_DIR}")
 
     add_library(zlib STATIC IMPORTED)
From 09701c3f2b1921292eace2788e78498158ffc5ee Mon Sep 17 00:00:00 2001
From: Cai Yudong
Date: Thu, 26 Dec 2019 14:16:20 +0800
Subject: [PATCH 3/5] #216 add CLI to get system info (#806)

---
 CHANGELOG.md                                |  1 +
 core/src/metrics/SystemInfo.cpp             | 31 ++++++++++++++-----
 core/src/metrics/SystemInfo.h               | 12 +++----
 .../server/delivery/request/CmdRequest.cpp  |  4 +++
 4 files changed, 33 insertions(+), 15 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5c6f2d82da..645a599c62 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,6 +10,7 @@ Please mark all change in change log and use the issue from GitHub
 - \#770 - Server unittest run failed on low-end server
 
 ## Feature
+- \#216 - Add CLI to get server info
 - \#343 - Add Opentracing
 - \#665 - Support get/set config via CLI
 - \#766 - If partition tag is similar, wrong partition is searched
diff --git a/core/src/metrics/SystemInfo.cpp b/core/src/metrics/SystemInfo.cpp
index 1421cc1c95..90d670b453 100644
--- a/core/src/metrics/SystemInfo.cpp
+++ b/core/src/metrics/SystemInfo.cpp
@@ -16,17 +16,14 @@
 // under the License.
 
 #include "metrics/SystemInfo.h"
+#include "thirdparty/nlohmann/json.hpp"
 #include "utils/Log.h"
 
 #include
-#include
-#include
-#include
+#include
+#include
 #include
-#include
-#include
-#include
-#include
+#include
 
 #ifdef MILVUS_GPU_VERSION
 #include
@@ -350,5 +347,25 @@ SystemInfo::Octets() {
     return res;
 }
 
+void
+SystemInfo::GetSysInfoJsonStr(std::string& result) {
+    std::map sys_info_map;
+
+    sys_info_map["memory_total"] = std::to_string(GetPhysicalMemory());
+    sys_info_map["memory_used"] = std::to_string(GetProcessUsedMemory());
+
+    std::vector gpu_mem_total = GPUMemoryTotal();
+    std::vector gpu_mem_used = GPUMemoryUsed();
+    for (size_t i = 0; i < gpu_mem_total.size(); i++) {
+        std::string key_total = "gpu" + std::to_string(i) + "_memory_total";
+        std::string key_used = "gpu" + std::to_string(i) + "_memory_used";
+        sys_info_map[key_total] = std::to_string(gpu_mem_total[i]);
+        sys_info_map[key_used] = std::to_string(gpu_mem_used[i]);
+    }
+
+    nlohmann::json sys_info_json(sys_info_map);
+    result = sys_info_json.dump();
+}
+
 } // namespace server
 } // namespace milvus
diff --git a/core/src/metrics/SystemInfo.h b/core/src/metrics/SystemInfo.h
index 0176475232..03cada2033 100644
--- a/core/src/metrics/SystemInfo.h
+++ b/core/src/metrics/SystemInfo.h
@@ -17,15 +17,8 @@
 
 #pragma once
 
-#include
-#include
-#include
-#include
-#include
-#include
-#include
 #include
-#include
+#include
 #include
 #include
 
@@ -127,6 +120,9 @@ class SystemInfo {
     GPUTemperature();
     std::vector
     CPUTemperature();
+
+    void
+    GetSysInfoJsonStr(std::string& result);
 };
 
 } // namespace server
diff --git a/core/src/server/delivery/request/CmdRequest.cpp b/core/src/server/delivery/request/CmdRequest.cpp
index 4263f8715c..e4e8c9cc50 100644
--- a/core/src/server/delivery/request/CmdRequest.cpp
+++ b/core/src/server/delivery/request/CmdRequest.cpp
@@ -16,6 +16,7 @@
 // under the License.
 
 #include "server/delivery/request/CmdRequest.h"
+#include "metrics/SystemInfo.h"
 #include "scheduler/SchedInst.h"
 #include "utils/Log.h"
 #include "utils/TimeRecorder.h"
@@ -52,6 +53,9 @@ CmdRequest::OnExecute() {
 #else
         result_ = "CPU";
 #endif
+    } else if (cmd_ == "get_system_info") {
+        server::SystemInfo& sys_info_inst = server::SystemInfo::GetInstance();
+        sys_info_inst.GetSysInfoJsonStr(result_);
     } else if (cmd_ == "build_commit_id") {
         result_ = LAST_COMMIT_ID;
     } else if (cmd_.substr(0, 10) == "set_config" || cmd_.substr(0, 10) == "get_config") {
From 546a1fcb748d3e3238a158185ad6f61087401ac4 Mon Sep 17 00:00:00 2001
From: ABNER-1
Date: Thu, 26 Dec 2019 14:18:11 +0800
Subject: [PATCH 4/5] Fix common util judge error (#835)

* '#831 fix exe_path judge error'

* #831 fix exe_path judge error
---
 CHANGELOG.md                  | 1 +
 core/src/utils/CommonUtil.cpp | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 645a599c62..5f913fce70 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,6 +8,7 @@ Please mark all change in change log and use the issue from GitHub
 - \#715 - Milvus crash when searching and building index simultaneously using SQ8H
 - \#744 - Don't return partition table for show_tables
 - \#770 - Server unittest run failed on low-end server
+- \#831 - Judge branch error in CommonUtil.cpp
 
 ## Feature
 - \#216 - Add CLI to get server info
diff --git a/core/src/utils/CommonUtil.cpp b/core/src/utils/CommonUtil.cpp
index cdfae8f1e5..7064764590 100644
--- a/core/src/utils/CommonUtil.cpp
+++ b/core/src/utils/CommonUtil.cpp
@@ -190,7 +190,7 @@ CommonUtil::GetExePath() {
     buf[cnt] = '\0';
 
     std::string exe_path = buf;
-    if (exe_path.rfind('/') != exe_path.length()) {
+    if (exe_path.rfind('/') != exe_path.length() - 1) {
         std::string sub_str = exe_path.substr(0, exe_path.rfind('/'));
         return sub_str + "/";
     }
From b140d8259b115d04f228e5dd4f6c05f20c3e3562 Mon Sep 17 00:00:00 2001
From: del-zhenwu <56623710+del-zhenwu@users.noreply.github.com>
Date: Thu, 26 Dec 2019 14:19:52 +0800
Subject: [PATCH 5/5] Update pip source (#822)

* Update pip source

* Update requirements

* Remove query range case
---
 ci/jenkins/step/singleDevNightlyTest.groovy     |  2 +-
 ci/jenkins/step/singleDevTest.groovy            |  2 +-
 tests/milvus_python_test/requirements.txt       | 15 ---------------
 tests/milvus_python_test/test_search_vectors.py | 16 ++++++++++------
 4 files changed, 12 insertions(+), 23 deletions(-)

diff --git a/ci/jenkins/step/singleDevNightlyTest.groovy b/ci/jenkins/step/singleDevNightlyTest.groovy
index 74a8768cba..823cc25660 100644
--- a/ci/jenkins/step/singleDevNightlyTest.groovy
+++ b/ci/jenkins/step/singleDevNightlyTest.groovy
@@ -1,6 +1,6 @@
 timeout(time: 90, unit: 'MINUTES') {
     dir ("tests/milvus_python_test") {
-        sh 'python3 -m pip install -r requirements.txt'
+        sh 'python3 -m pip install -r requirements.txt -i http://pypi.douban.com/simple --trusted-host pypi.douban.com'
         sh "pytest . 
--alluredir=\"test_out/dev/single/sqlite\" --ip ${env.HELM_RELEASE_NAME}-engine.milvus.svc.cluster.local" } // mysql database backend test diff --git a/ci/jenkins/step/singleDevTest.groovy b/ci/jenkins/step/singleDevTest.groovy index 291b4470e5..0ad9e369bf 100644 --- a/ci/jenkins/step/singleDevTest.groovy +++ b/ci/jenkins/step/singleDevTest.groovy @@ -1,6 +1,6 @@ timeout(time: 60, unit: 'MINUTES') { dir ("tests/milvus_python_test") { - sh 'python3 -m pip install -r requirements.txt' + sh 'python3 -m pip install -r requirements.txt -i http://pypi.douban.com/simple --trusted-host pypi.douban.com' sh "pytest . --alluredir=\"test_out/dev/single/sqlite\" --level=1 --ip ${env.HELM_RELEASE_NAME}-engine.milvus.svc.cluster.local" } diff --git a/tests/milvus_python_test/requirements.txt b/tests/milvus_python_test/requirements.txt index 016c8dedfc..df3191ccd6 100644 --- a/tests/milvus_python_test/requirements.txt +++ b/tests/milvus_python_test/requirements.txt @@ -1,14 +1,4 @@ -astroid==2.2.5 -atomicwrites==1.3.0 -attrs==19.1.0 -importlib-metadata==0.15 -isort==4.3.20 -lazy-object-proxy==1.4.1 -mccabe==0.6.1 -more-itertools==7.0.0 numpy==1.16.3 -pluggy==0.12.0 -py==1.8.0 pylint==2.3.1 pytest==4.5.0 pytest-timeout==1.3.3 @@ -16,10 +6,5 @@ pytest-repeat==0.8.0 allure-pytest==2.7.0 pytest-print==0.1.2 pytest-level==0.1.1 -six==1.12.0 -typed-ast==1.3.5 -wcwidth==0.1.7 -wrapt==1.11.1 -zipp==0.5.1 scikit-learn>=0.19.1 pymilvus-test>=0.2.0 diff --git a/tests/milvus_python_test/test_search_vectors.py b/tests/milvus_python_test/test_search_vectors.py index 464a28efea..76bc81b384 100644 --- a/tests/milvus_python_test/test_search_vectors.py +++ b/tests/milvus_python_test/test_search_vectors.py @@ -449,7 +449,8 @@ class TestSearchBase: def get_invalid_range(self, request): yield request.param - def test_search_invalid_query_ranges(self, connect, table, get_invalid_range): + # disable + def _test_search_invalid_query_ranges(self, connect, table, get_invalid_range): ''' target: search table with query ranges method: search with the same query ranges @@ -477,7 +478,8 @@ class TestSearchBase: def get_valid_range_no_result(self, request): yield request.param - def test_search_valid_query_ranges_no_result(self, connect, table, get_valid_range_no_result): + # disable + def _test_search_valid_query_ranges_no_result(self, connect, table, get_valid_range_no_result): ''' target: search table with normal query ranges, but no data in db method: search with query ranges (low, low) @@ -505,7 +507,8 @@ class TestSearchBase: def get_valid_range(self, request): yield request.param - def test_search_valid_query_ranges(self, connect, table, get_valid_range): + # disable + def _test_search_valid_query_ranges(self, connect, table, get_valid_range): ''' target: search table with normal query ranges, but no data in db method: search with query ranges (low, normal) @@ -878,8 +881,9 @@ class TestSearchParamsInvalid(object): def get_query_ranges(self, request): yield request.param + # disable @pytest.mark.level(1) - def test_search_flat_with_invalid_query_range(self, connect, table, get_query_ranges): + def _test_search_flat_with_invalid_query_range(self, connect, table, get_query_ranges): ''' target: test search fuction, with the wrong query_range method: search with query_range @@ -893,9 +897,9 @@ class TestSearchParamsInvalid(object): with pytest.raises(Exception) as e: status, result = connect.search_vectors(table, 1, nprobe, query_vecs, query_ranges=query_ranges) - + # disable @pytest.mark.level(2) - def 
test_search_flat_with_invalid_query_range_ip(self, connect, ip_table, get_query_ranges): + def _test_search_flat_with_invalid_query_range_ip(self, connect, ip_table, get_query_ranges): ''' target: test search fuction, with the wrong query_range method: search with query_range