diff --git a/CHANGELOG.md b/CHANGELOG.md index 590d5c151c..173a93df23 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,9 +4,12 @@ Please mark all change in change log and use the ticket from JIRA. # Milvus 0.6.0 (TODO) ## Bug +- \#228 - memory usage increased slowly during searching vectors - \#246 - Exclude src/external folder from code coverage for jenkin ci - \#248 - Reside src/external in thirdparty +- \#316 - Some files not merged after vectors added - \#327 - Search does not use GPU when index type is FLAT +- \#340 - Test cases run failed on 0.6.0 ## Feature - \#12 - Pure CPU version for Milvus @@ -19,6 +22,8 @@ Please mark all change in change log and use the ticket from JIRA. - \#260 - C++ SDK README - \#314 - add Find FAISS in CMake - \#310 - Add Q&A for 'protocol https not supported or disable in libcurl' issue +- \#322 - Add option to enable / disable prometheus +- \#358 - Add more information in build.sh and install.md ## Task diff --git a/ci/jenkins/Jenkinsfile b/ci/jenkins/Jenkinsfile index 40d9686415..f0562ada64 100644 --- a/ci/jenkins/Jenkinsfile +++ b/ci/jenkins/Jenkinsfile @@ -33,128 +33,267 @@ pipeline { } stages { - stage("Ubuntu 18.04") { + stage("Ubuntu 18.04 x86_64") { environment { OS_NAME = "ubuntu18.04" - PACKAGE_VERSION = VersionNumber([ - versionNumberString : '${SEMVER}-${LOWER_BUILD_TYPE}-ubuntu18.04-x86_64-${BUILD_DATE_FORMATTED, "yyyyMMdd"}-${BUILDS_TODAY}' - ]); - DOCKER_VERSION = "${SEMVER}-${OS_NAME}-${LOWER_BUILD_TYPE}" + CPU_ARCH = "amd64" } - stages { - stage("Run Build") { - agent { - kubernetes { - label 'build' - defaultContainer 'jnlp' - yamlFile 'ci/jenkins/pod/milvus-build-env-pod.yaml' - } + parallel { + stage ("GPU Version") { + environment { + BINRARY_VERSION = "gpu" + PACKAGE_VERSION = VersionNumber([ + versionNumberString : '${SEMVER}-gpu-${OS_NAME}-${CPU_ARCH}-${LOWER_BUILD_TYPE}-${BUILD_DATE_FORMATTED, "yyyyMMdd"}-${BUILDS_TODAY}' + ]); + DOCKER_VERSION = "${SEMVER}-gpu-${OS_NAME}-${LOWER_BUILD_TYPE}" } stages { - stage('Build') { - steps { - container('milvus-build-env') { - script { - load "${env.WORKSPACE}/ci/jenkins/step/build.groovy" - } + stage("Run Build") { + agent { + kubernetes { + label "${BINRARY_VERSION}-build" + defaultContainer 'jnlp' + yamlFile 'ci/jenkins/pod/milvus-gpu-version-build-env-pod.yaml' } } - } - stage('Code Coverage') { - steps { - container('milvus-build-env') { - script { - load "${env.WORKSPACE}/ci/jenkins/step/coverage.groovy" + + stages { + stage('Build') { + steps { + container('milvus-build-env') { + script { + load "${env.WORKSPACE}/ci/jenkins/step/build.groovy" + } + } } } - } - } - stage('Upload Package') { - steps { - container('milvus-build-env') { - script { - load "${env.WORKSPACE}/ci/jenkins/step/package.groovy" + stage('Code Coverage') { + steps { + container('milvus-build-env') { + script { + load "${env.WORKSPACE}/ci/jenkins/step/coverage.groovy" + } + } } } - } - } - } - } - - stage("Publish docker images") { - agent { - kubernetes { - label 'publish' - defaultContainer 'jnlp' - yamlFile 'ci/jenkins/pod/docker-pod.yaml' - } - } - - stages { - stage('Publish') { - steps { - container('publish-images'){ - script { - load "${env.WORKSPACE}/ci/jenkins/step/publishImages.groovy" - } - } - } - } - } - } - - stage("Deploy to Development") { - agent { - kubernetes { - label 'dev-test' - defaultContainer 'jnlp' - yamlFile 'ci/jenkins/pod/testEnvironment.yaml' - } - } - - stages { - stage("Deploy to Dev") { - steps { - container('milvus-test-env') { - script { - load 
"${env.WORKSPACE}/ci/jenkins/step/deploySingle2Dev.groovy" - } - } - } - } - - stage("Dev Test") { - steps { - container('milvus-test-env') { - script { - boolean isNightlyTest = isTimeTriggeredBuild() - if (isNightlyTest) { - load "${env.WORKSPACE}/ci/jenkins/step/singleDevNightlyTest.groovy" - } else { - load "${env.WORKSPACE}/ci/jenkins/step/singleDevTest.groovy" + stage('Upload Package') { + steps { + container('milvus-build-env') { + script { + load "${env.WORKSPACE}/ci/jenkins/step/package.groovy" + } } } } } } - stage ("Cleanup Dev") { - steps { - container('milvus-test-env') { - script { - load "${env.WORKSPACE}/ci/jenkins/step/cleanupSingleDev.groovy" + stage("Publish docker images") { + agent { + kubernetes { + label "${BINRARY_VERSION}-publish" + defaultContainer 'jnlp' + yamlFile 'ci/jenkins/pod/docker-pod.yaml' + } + } + + stages { + stage('Publish') { + steps { + container('publish-images'){ + script { + load "${env.WORKSPACE}/ci/jenkins/step/publishImages.groovy" + } + } + } + } + } + } + + stage("Deploy to Development") { + agent { + kubernetes { + label "${BINRARY_VERSION}-dev-test" + defaultContainer 'jnlp' + yamlFile 'ci/jenkins/pod/testEnvironment.yaml' + } + } + + stages { + stage("Deploy to Dev") { + steps { + container('milvus-test-env') { + script { + load "${env.WORKSPACE}/ci/jenkins/step/deploySingle2Dev.groovy" + } + } + } + } + + stage("Dev Test") { + steps { + container('milvus-test-env') { + script { + boolean isNightlyTest = isTimeTriggeredBuild() + if (isNightlyTest) { + load "${env.WORKSPACE}/ci/jenkins/step/singleDevNightlyTest.groovy" + } else { + load "${env.WORKSPACE}/ci/jenkins/step/singleDevTest.groovy" + } + } + } + } + } + + stage ("Cleanup Dev") { + steps { + container('milvus-test-env') { + script { + load "${env.WORKSPACE}/ci/jenkins/step/cleanupSingleDev.groovy" + } + } + } + } + } + post { + unsuccessful { + container('milvus-test-env') { + script { + load "${env.WORKSPACE}/ci/jenkins/step/cleanupSingleDev.groovy" + } } } } } } - post { - unsuccessful { - container('milvus-test-env') { - script { - load "${env.WORKSPACE}/ci/jenkins/step/cleanupSingleDev.groovy" + } + + stage ("CPU Version") { + environment { + BINRARY_VERSION = "cpu" + PACKAGE_VERSION = VersionNumber([ + versionNumberString : '${SEMVER}-cpu-${OS_NAME}-${CPU_ARCH}-${LOWER_BUILD_TYPE}-${BUILD_DATE_FORMATTED, "yyyyMMdd"}-${BUILDS_TODAY}' + ]); + DOCKER_VERSION = "${SEMVER}-cpu-${OS_NAME}-${LOWER_BUILD_TYPE}" + } + + stages { + stage("Run Build") { + agent { + kubernetes { + label "${BINRARY_VERSION}-build" + defaultContainer 'jnlp' + yamlFile 'ci/jenkins/pod/milvus-cpu-version-build-env-pod.yaml' + } + } + + stages { + stage('Build') { + steps { + container('milvus-build-env') { + script { + load "${env.WORKSPACE}/ci/jenkins/step/build.groovy" + } + } + } + } + stage('Code Coverage') { + steps { + container('milvus-build-env') { + script { + load "${env.WORKSPACE}/ci/jenkins/step/coverage.groovy" + } + } + } + } + stage('Upload Package') { + steps { + container('milvus-build-env') { + script { + load "${env.WORKSPACE}/ci/jenkins/step/package.groovy" + } + } + } + } + } + } + + stage("Publish docker images") { + agent { + kubernetes { + label "${BINRARY_VERSION}-publish" + defaultContainer 'jnlp' + yamlFile 'ci/jenkins/pod/docker-pod.yaml' + } + } + + stages { + stage('Publish') { + steps { + container('publish-images'){ + script { + load "${env.WORKSPACE}/ci/jenkins/step/publishImages.groovy" + } + } + } + } + } + } + + stage("Deploy to Development") { + agent { + kubernetes { + 
label "${BINRARY_VERSION}-dev-test" + defaultContainer 'jnlp' + yamlFile 'ci/jenkins/pod/testEnvironment.yaml' + } + } + + stages { + stage("Deploy to Dev") { + steps { + container('milvus-test-env') { + script { + load "${env.WORKSPACE}/ci/jenkins/step/deploySingle2Dev.groovy" + } + } + } + } + + stage("Dev Test") { + steps { + container('milvus-test-env') { + script { + boolean isNightlyTest = isTimeTriggeredBuild() + if (isNightlyTest) { + load "${env.WORKSPACE}/ci/jenkins/step/singleDevNightlyTest.groovy" + } else { + load "${env.WORKSPACE}/ci/jenkins/step/singleDevTest.groovy" + } + } + } + } + } + + stage ("Cleanup Dev") { + steps { + container('milvus-test-env') { + script { + load "${env.WORKSPACE}/ci/jenkins/step/cleanupSingleDev.groovy" + } + } + } + } + } + post { + unsuccessful { + container('milvus-test-env') { + script { + load "${env.WORKSPACE}/ci/jenkins/step/cleanupSingleDev.groovy" + } + } } } } diff --git a/ci/jenkins/pod/milvus-cpu-version-build-env-pod.yaml b/ci/jenkins/pod/milvus-cpu-version-build-env-pod.yaml new file mode 100644 index 0000000000..561bfe8140 --- /dev/null +++ b/ci/jenkins/pod/milvus-cpu-version-build-env-pod.yaml @@ -0,0 +1,34 @@ +apiVersion: v1 +kind: Pod +metadata: + name: milvus-cpu-build-env + labels: + app: milvus + componet: cpu-build-env +spec: + containers: + - name: milvus-build-env + image: registry.zilliz.com/milvus/milvus-cpu-build-env:v0.6.0-ubuntu18.04 + env: + - name: POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + command: + - cat + tty: true + resources: + limits: + memory: "32Gi" + cpu: "8.0" + requests: + memory: "16Gi" + cpu: "4.0" + - name: milvus-mysql + image: mysql:5.6 + env: + - name: MYSQL_ROOT_PASSWORD + value: 123456 + ports: + - containerPort: 3306 + name: mysql diff --git a/ci/jenkins/pod/milvus-build-env-pod.yaml b/ci/jenkins/pod/milvus-gpu-version-build-env-pod.yaml similarity index 80% rename from ci/jenkins/pod/milvus-build-env-pod.yaml rename to ci/jenkins/pod/milvus-gpu-version-build-env-pod.yaml index da938d8ba2..422dd72ab2 100644 --- a/ci/jenkins/pod/milvus-build-env-pod.yaml +++ b/ci/jenkins/pod/milvus-gpu-version-build-env-pod.yaml @@ -1,14 +1,14 @@ apiVersion: v1 kind: Pod metadata: - name: milvus-build-env + name: milvus-gpu-build-env labels: app: milvus - componet: build-env + componet: gpu-build-env spec: containers: - name: milvus-build-env - image: registry.zilliz.com/milvus/milvus-build-env:v0.5.1-ubuntu18.04 + image: registry.zilliz.com/milvus/milvus-gpu-build-env:v0.6.0-ubuntu18.04 env: - name: POD_IP valueFrom: diff --git a/ci/jenkins/step/build.groovy b/ci/jenkins/step/build.groovy index bae4259a6f..6c1da64a82 100644 --- a/ci/jenkins/step/build.groovy +++ b/ci/jenkins/step/build.groovy @@ -1,8 +1,11 @@ timeout(time: 60, unit: 'MINUTES') { dir ("ci/scripts") { withCredentials([usernamePassword(credentialsId: "${params.JFROG_CREDENTIALS_ID}", usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD')]) { - sh "export JFROG_ARTFACTORY_URL='${params.JFROG_ARTFACTORY_URL}' && export JFROG_USER_NAME='${USERNAME}' && export JFROG_PASSWORD='${PASSWORD}' && ./build.sh -t ${params.BUILD_TYPE} -o /opt/milvus -l -g -j -u -c" + if ("${env.BINRARY_VERSION}" == "gpu") { + sh "export JFROG_ARTFACTORY_URL='${params.JFROG_ARTFACTORY_URL}' && export JFROG_USER_NAME='${USERNAME}' && export JFROG_PASSWORD='${PASSWORD}' && ./build.sh -t ${params.BUILD_TYPE} -o /opt/milvus -l -g -j -u -c" + } else { + sh "export JFROG_ARTFACTORY_URL='${params.JFROG_ARTFACTORY_URL}' && export JFROG_USER_NAME='${USERNAME}' && 
export JFROG_PASSWORD='${PASSWORD}' && ./build.sh -t ${params.BUILD_TYPE} -o /opt/milvus -l -m -j -u -c" + } } } } - diff --git a/ci/jenkins/step/cleanupSingleDev.groovy b/ci/jenkins/step/cleanupSingleDev.groovy index 3b8c1833b5..30325e0c91 100644 --- a/ci/jenkins/step/cleanupSingleDev.groovy +++ b/ci/jenkins/step/cleanupSingleDev.groovy @@ -1,12 +1,12 @@ try { - def helmResult = sh script: "helm status ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu", returnStatus: true + def helmResult = sh script: "helm status ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}", returnStatus: true if (!helmResult) { - sh "helm del --purge ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu" + sh "helm del --purge ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}" } } catch (exc) { - def helmResult = sh script: "helm status ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu", returnStatus: true + def helmResult = sh script: "helm status ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}", returnStatus: true if (!helmResult) { - sh "helm del --purge ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu" + sh "helm del --purge ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}" } throw exc } diff --git a/ci/jenkins/step/deploySingle2Dev.groovy b/ci/jenkins/step/deploySingle2Dev.groovy index f4964df5e2..f1daaf22ec 100644 --- a/ci/jenkins/step/deploySingle2Dev.groovy +++ b/ci/jenkins/step/deploySingle2Dev.groovy @@ -1,9 +1,13 @@ sh 'helm init --client-only --skip-refresh --stable-repo-url https://kubernetes.oss-cn-hangzhou.aliyuncs.com/charts' sh 'helm repo update' dir ('milvus-helm') { - checkout([$class: 'GitSCM', branches: [[name: "0.5.0"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus-helm.git", name: 'origin', refspec: "+refs/heads/0.5.0:refs/remotes/origin/0.5.0"]]]) - dir ("milvus-gpu") { - sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu -f ci/db_backend/sqlite_values.yaml -f ci/filebeat/values.yaml --namespace milvus ." + checkout([$class: 'GitSCM', branches: [[name: "0.6.0"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus-helm.git", name: 'origin', refspec: "+refs/heads/0.6.0:refs/remotes/origin/0.6.0"]]]) + dir ("milvus") { + if ("${env.BINRARY_VERSION}" == "gpu") { + sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION} -f gpu_values.yaml -f ci/filebeat/values.yaml --namespace milvus ." + } else { + sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION} -f ci/filebeat/values.yaml --namespace milvus ." 
+ } } } diff --git a/ci/jenkins/step/publishImages.groovy b/ci/jenkins/step/publishImages.groovy index 62df0c73bf..72e9924c62 100644 --- a/ci/jenkins/step/publishImages.groovy +++ b/ci/jenkins/step/publishImages.groovy @@ -1,6 +1,6 @@ container('publish-images') { timeout(time: 15, unit: 'MINUTES') { - dir ("docker/deploy/${OS_NAME}") { + dir ("docker/deploy/${env.BINRARY_VERSION}/${env.OS_NAME}") { def binaryPackage = "${PROJECT_NAME}-${PACKAGE_VERSION}.tar.gz" withCredentials([usernamePassword(credentialsId: "${params.JFROG_CREDENTIALS_ID}", usernameVariable: 'JFROG_USERNAME', passwordVariable: 'JFROG_PASSWORD')]) { diff --git a/ci/jenkins/step/singleDevNightlyTest.groovy b/ci/jenkins/step/singleDevNightlyTest.groovy index 9aeab2eb4e..d357badfd3 100644 --- a/ci/jenkins/step/singleDevNightlyTest.groovy +++ b/ci/jenkins/step/singleDevNightlyTest.groovy @@ -1,22 +1,26 @@ timeout(time: 90, unit: 'MINUTES') { dir ("tests/milvus_python_test") { sh 'python3 -m pip install -r requirements.txt' - sh "pytest . --alluredir=\"test_out/dev/single/sqlite\" --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu-milvus-gpu-engine.milvus.svc.cluster.local" + sh "pytest . --alluredir=\"test_out/dev/single/sqlite\" --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}-milvus-engine.milvus.svc.cluster.local" } // mysql database backend test load "${env.WORKSPACE}/ci/jenkins/jenkinsfile/cleanupSingleDev.groovy" if (!fileExists('milvus-helm')) { dir ("milvus-helm") { - checkout([$class: 'GitSCM', branches: [[name: "0.5.0"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus-helm.git", name: 'origin', refspec: "+refs/heads/0.5.0:refs/remotes/origin/0.5.0"]]]) + checkout([$class: 'GitSCM', branches: [[name: "0.6.0"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus-helm.git", name: 'origin', refspec: "+refs/heads/0.6.0:refs/remotes/origin/0.6.0"]]]) } } dir ("milvus-helm") { - dir ("milvus-gpu") { - sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu -f ci/db_backend/mysql_values.yaml -f ci/filebeat/values.yaml --namespace milvus ." + dir ("milvus") { + if ("${env.BINRARY_VERSION}" == "gpu") { + sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION} -f gpu_values.yaml -f ci/db_backend/mysql_values.yaml -f ci/filebeat/values.yaml --namespace milvus ." + } else { + sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION} -f ci/db_backend/mysql_values.yaml -f ci/filebeat/values.yaml --namespace milvus ." + } } } dir ("tests/milvus_python_test") { - sh "pytest . --alluredir=\"test_out/dev/single/mysql\" --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu-milvus-gpu-engine.milvus.svc.cluster.local" + sh "pytest . 
--alluredir=\"test_out/dev/single/mysql\" --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}-milvus-engine.milvus.svc.cluster.local" } } diff --git a/ci/jenkins/step/singleDevTest.groovy b/ci/jenkins/step/singleDevTest.groovy index 86e6f126d9..c1de5907b0 100644 --- a/ci/jenkins/step/singleDevTest.groovy +++ b/ci/jenkins/step/singleDevTest.groovy @@ -1,24 +1,27 @@ timeout(time: 60, unit: 'MINUTES') { dir ("tests/milvus_python_test") { sh 'python3 -m pip install -r requirements.txt' - sh "pytest . --alluredir=\"test_out/dev/single/sqlite\" --level=1 --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu-milvus-gpu-engine.milvus.svc.cluster.local" + sh "pytest . --alluredir=\"test_out/dev/single/sqlite\" --level=1 --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}-milvus-engine.milvus.svc.cluster.local" } + // mysql database backend test // load "${env.WORKSPACE}/ci/jenkins/jenkinsfile/cleanupSingleDev.groovy" - // Remove mysql-version tests: 10-28 - // if (!fileExists('milvus-helm')) { // dir ("milvus-helm") { - // checkout([$class: 'GitSCM', branches: [[name: "0.5.0"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus-helm.git", name: 'origin', refspec: "+refs/heads/0.5.0:refs/remotes/origin/0.5.0"]]]) + // checkout([$class: 'GitSCM', branches: [[name: "0.6.0"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus-helm.git", name: 'origin', refspec: "+refs/heads/0.6.0:refs/remotes/origin/0.6.0"]]]) // } // } // dir ("milvus-helm") { - // dir ("milvus-gpu") { - // sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu -f ci/db_backend/mysql_values.yaml -f ci/filebeat/values.yaml --namespace milvus ." + // dir ("milvus") { + // if ("${env.BINRARY_VERSION}" == "gpu") { + // sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION} -f gpu_values.yaml -f ci/db_backend/mysql_values.yaml -f ci/filebeat/values.yaml --namespace milvus ." + // } else { + // sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION} -f ci/db_backend/mysql_values.yaml -f ci/filebeat/values.yaml --namespace milvus ." + // } // } // } // dir ("tests/milvus_python_test") { - // sh "pytest . --alluredir=\"test_out/dev/single/mysql\" --level=1 --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu-milvus-gpu-engine.milvus.svc.cluster.local" + // sh "pytest . 
--alluredir=\"test_out/dev/single/mysql\" --level=1 --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}-milvus-engine.milvus.svc.cluster.local" // } } diff --git a/core/CMakeLists.txt b/core/CMakeLists.txt index 45a2bc5fe6..f24935f788 100644 --- a/core/CMakeLists.txt +++ b/core/CMakeLists.txt @@ -33,7 +33,7 @@ message(STATUS "Build time = ${BUILD_TIME}") MACRO(GET_GIT_BRANCH_NAME GIT_BRANCH_NAME) execute_process(COMMAND sh "-c" "git log --decorate | head -n 1 | sed 's/.*(\\(.*\\))/\\1/' | sed 's/.* \\(.*\\),.*/\\1/' | sed 's=[a-zA-Z]*\/==g'" - OUTPUT_VARIABLE ${GIT_BRANCH_NAME}) + OUTPUT_VARIABLE ${GIT_BRANCH_NAME}) ENDMACRO(GET_GIT_BRANCH_NAME) GET_GIT_BRANCH_NAME(GIT_BRANCH_NAME) @@ -117,17 +117,17 @@ include(DefineOptions) include(BuildUtils) include(ThirdPartyPackages) -if(MILVUS_USE_CCACHE) - find_program(CCACHE_FOUND ccache) - if(CCACHE_FOUND) - message(STATUS "Using ccache: ${CCACHE_FOUND}") - set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ${CCACHE_FOUND}) - set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ${CCACHE_FOUND}) - # let ccache preserve C++ comments, because some of them may be - # meaningful to the compiler - set(ENV{CCACHE_COMMENTS} "1") - endif(CCACHE_FOUND) -endif() +if (MILVUS_USE_CCACHE) + find_program(CCACHE_FOUND ccache) + if (CCACHE_FOUND) + message(STATUS "Using ccache: ${CCACHE_FOUND}") + set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ${CCACHE_FOUND}) + set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ${CCACHE_FOUND}) + # let ccache preserve C++ comments, because some of them may be + # meaningful to the compiler + set(ENV{CCACHE_COMMENTS} "1") + endif (CCACHE_FOUND) +endif () set(MILVUS_CPU_VERSION false) if (MILVUS_GPU_VERSION) @@ -142,6 +142,10 @@ else () add_compile_definitions("MILVUS_CPU_VERSION") endif () +if (MILVUS_WITH_PROMETHEUS) + add_compile_definitions("MILVUS_WITH_PROMETHEUS") +endif () + if (CMAKE_BUILD_TYPE STREQUAL "Release") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O3 -fPIC -DELPP_THREAD_SAFE -fopenmp") if (MILVUS_GPU_VERSION) @@ -176,9 +180,9 @@ endif () if (MILVUS_GPU_VERSION) configure_file(${CMAKE_CURRENT_SOURCE_DIR}/conf/server_gpu_config.template ${CMAKE_CURRENT_SOURCE_DIR}/conf/server_config.yaml) -else() +else () configure_file(${CMAKE_CURRENT_SOURCE_DIR}/conf/server_cpu_config.template ${CMAKE_CURRENT_SOURCE_DIR}/conf/server_config.yaml) -endif() +endif () configure_file(${CMAKE_CURRENT_SOURCE_DIR}/conf/log_config.template ${CMAKE_CURRENT_SOURCE_DIR}/conf/log_config.conf) diff --git a/core/build.sh b/core/build.sh index 9b690a0261..3afb5d1b37 100755 --- a/core/build.sh +++ b/core/build.sh @@ -14,64 +14,69 @@ CUSTOMIZATION="OFF" # default use ori faiss CUDA_COMPILER=/usr/local/cuda/bin/nvcc GPU_VERSION="OFF" #defaults to CPU version WITH_MKL="OFF" -FAISS_ROOT="" +FAISS_ROOT="" #FAISS root path FAISS_SOURCE="BUNDLED" +WITH_PROMETHEUS="ON" -while getopts "p:d:t:f:ulrcgjhxzm" arg -do - case $arg in - p) - INSTALL_PREFIX=$OPTARG - ;; - d) - DB_PATH=$OPTARG - ;; - t) - BUILD_TYPE=$OPTARG # BUILD_TYPE - ;; - f) - FAISS_ROOT=$OPTARG - FAISS_SOURCE="AUTO" - ;; - u) - echo "Build and run unittest cases" ; - BUILD_UNITTEST="ON"; - ;; - l) - RUN_CPPLINT="ON" - ;; - r) - if [[ -d ${BUILD_OUTPUT_DIR} ]]; then - rm ./${BUILD_OUTPUT_DIR} -r - MAKE_CLEAN="ON" - fi - ;; - c) - BUILD_COVERAGE="ON" - ;; - z) - PROFILING="ON" - ;; - j) - USE_JFROG_CACHE="ON" - ;; - x) - CUSTOMIZATION="OFF" # force use ori faiss - ;; - g) - GPU_VERSION="ON" - ;; - m) - WITH_MKL="ON" - ;; - h) # help - echo " +while getopts "p:d:t:f:ulrcgjhxzme" arg; 
do + case $arg in + p) + INSTALL_PREFIX=$OPTARG + ;; + d) + DB_PATH=$OPTARG + ;; + t) + BUILD_TYPE=$OPTARG # BUILD_TYPE + ;; + f) + FAISS_ROOT=$OPTARG + FAISS_SOURCE="AUTO" + ;; + u) + echo "Build and run unittest cases" + BUILD_UNITTEST="ON" + ;; + l) + RUN_CPPLINT="ON" + ;; + r) + if [[ -d ${BUILD_OUTPUT_DIR} ]]; then + rm ./${BUILD_OUTPUT_DIR} -r + MAKE_CLEAN="ON" + fi + ;; + c) + BUILD_COVERAGE="ON" + ;; + z) + PROFILING="ON" + ;; + j) + USE_JFROG_CACHE="ON" + ;; + x) + CUSTOMIZATION="OFF" # force use ori faiss + ;; + g) + GPU_VERSION="ON" + ;; + m) + WITH_MKL="ON" + ;; + e) + WITH_PROMETHEUS="OFF" + ;; + h) # help + echo " parameter: -p: install prefix(default: $(pwd)/milvus) -d: db data path(default: /tmp/milvus) -t: build type(default: Debug) --f: faiss root path(default: empty) +-f: FAISS root path(default: empty). The path should be an absolute path + containing the pre-installed lib/ and include/ directory of FAISS. If they can't be found, + we will build the original FAISS from source instead. -u: building unit test options(default: OFF) -l: run cpplint, clang-format and clang-tidy(default: OFF) -r: remove previous build directory(default: OFF) @@ -80,29 +85,30 @@ parameter: -j: use jfrog cache build directory(default: OFF) -g: build GPU version(default: OFF) -m: build with MKL(default: OFF) +-e: build without prometheus(default: OFF) -h: help usage: -./build.sh -p \${INSTALL_PREFIX} -t \${BUILD_TYPE} -f \${FAISS_ROOT} [-u] [-l] [-r] [-c] [-z] [-j] [-g] [-m] [-h] +./build.sh -p \${INSTALL_PREFIX} -t \${BUILD_TYPE} -f \${FAISS_ROOT} [-u] [-l] [-r] [-c] [-z] [-j] [-g] [-m] [-e] [-h] " - exit 0 - ;; - ?) - echo "ERROR! unknown argument" - exit 1 - ;; - esac + exit 0 + ;; + ?) + echo "ERROR! unknown argument" + exit 1 + ;; + esac done if [[ ! -d ${BUILD_OUTPUT_DIR} ]]; then - mkdir ${BUILD_OUTPUT_DIR} + mkdir ${BUILD_OUTPUT_DIR} fi cd ${BUILD_OUTPUT_DIR} # remove make cache since build.sh -l use default variables # force update the variables each time -make rebuild_cache > /dev/null 2>&1 +make rebuild_cache >/dev/null 2>&1 CMAKE_CMD="cmake \ -DBUILD_UNIT_TEST=${BUILD_UNITTEST} \ @@ -118,30 +124,31 @@ CMAKE_CMD="cmake \ -DCUSTOMIZATION=${CUSTOMIZATION} \ -DMILVUS_GPU_VERSION=${GPU_VERSION} \ -DFAISS_WITH_MKL=${WITH_MKL} \ +-DMILVUS_WITH_PROMETHEUS=${WITH_PROMETHEUS} \ ../" echo ${CMAKE_CMD} ${CMAKE_CMD} if [[ ${MAKE_CLEAN} == "ON" ]]; then - make clean + make clean fi if [[ ${RUN_CPPLINT} == "ON" ]]; then - # cpplint check - make lint - if [ $? -ne 0 ]; then - echo "ERROR! cpplint check failed" - exit 1 - fi - echo "cpplint check passed!" + # cpplint check + make lint + if [ $? -ne 0 ]; then + echo "ERROR! cpplint check failed" + exit 1 + fi + echo "cpplint check passed!" - # clang-format check - make check-clang-format - if [ $? -ne 0 ]; then - echo "ERROR! clang-format check failed" - exit 1 - fi - echo "clang-format check passed!" + # clang-format check + make check-clang-format + if [ $? -ne 0 ]; then + echo "ERROR! clang-format check failed" + exit 1 + fi + echo "clang-format check passed!" # # clang-tidy check # make check-clang-tidy @@ -152,11 +159,11 @@ if [[ ${RUN_CPPLINT} == "ON" ]]; then # echo "clang-tidy check passed!" 
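# NOTE (editor, illustrative only -- not part of this change): with the
# options added above, typical invocations of this script might look like:
#
#   ./build.sh -t Release -p /opt/milvus -l -g -j -u -c   # GPU version
#   ./build.sh -t Release -p /opt/milvus -l -m -j -u -c   # CPU version with MKL
#   ./build.sh -t Debug -e                                # build without prometheus
#   ./build.sh -t Release -f /usr/local/faiss             # use pre-installed FAISS
#
# Flags are the ones documented in the help text above; the install prefix
# and FAISS path are placeholders, not values taken from this diff.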
else - # strip binary symbol - if [[ ${BUILD_TYPE} != "Debug" ]]; then - strip src/milvus_server - fi + # strip binary symbol + if [[ ${BUILD_TYPE} != "Debug" ]]; then + strip src/milvus_server + fi - # compile and build - make -j 8 install || exit 1 + # compile and build + make -j 8 install || exit 1 fi diff --git a/core/src/CMakeLists.txt b/core/src/CMakeLists.txt index 937c0716a4..aa79c084f1 100644 --- a/core/src/CMakeLists.txt +++ b/core/src/CMakeLists.txt @@ -37,6 +37,7 @@ endforeach () aux_source_directory(${MILVUS_ENGINE_SRC}/cache cache_files) aux_source_directory(${MILVUS_ENGINE_SRC}/config config_files) aux_source_directory(${MILVUS_ENGINE_SRC}/metrics metrics_files) +aux_source_directory(${MILVUS_ENGINE_SRC}/metrics/prometheus metrics_prometheus_files) aux_source_directory(${MILVUS_ENGINE_SRC}/db db_main_files) aux_source_directory(${MILVUS_ENGINE_SRC}/db/engine db_engine_files) aux_source_directory(${MILVUS_ENGINE_SRC}/db/insert db_insert_files) @@ -91,6 +92,11 @@ set(engine_files ${wrapper_files} ) +if (MILVUS_WITH_PROMETHEUS) + set(engine_files ${engine_files} + ${metrics_prometheus_files}) +endif () + set(client_grpc_lib grpcpp_channelz grpc++ @@ -115,7 +121,6 @@ set(third_party_libs sqlite ${client_grpc_lib} yaml-cpp - ${prometheus_lib} mysqlpp zlib ${boost_lib} @@ -138,13 +143,19 @@ if (MILVUS_GPU_VERSION) ) endif () -if (MILVUS_ENABLE_PROFILING STREQUAL "ON") +if (MILVUS_ENABLE_PROFILING) set(third_party_libs ${third_party_libs} gperftools libunwind ) endif () +if (MILVUS_WITH_PROMETHEUS) + set(third_party_libs ${third_party_libs} + ${prometheus_lib} + ) +endif () + set(engine_libs pthread libgomp.a @@ -166,13 +177,22 @@ target_link_libraries(milvus_engine ${engine_libs} ) -add_library(metrics STATIC ${metrics_files}) +if (MILVUS_WITH_PROMETHEUS) + add_library(metrics STATIC ${metrics_files} ${metrics_prometheus_files}) +else () + add_library(metrics STATIC ${metrics_files}) +endif () set(metrics_lib yaml-cpp - ${prometheus_lib} ) +if (MILVUS_WITH_PROMETHEUS) + set(metrics_lib ${metrics_lib} + ${prometheus_lib} + ) +endif () + target_link_libraries(metrics ${metrics_lib}) set(server_libs diff --git a/core/src/db/DBImpl.cpp b/core/src/db/DBImpl.cpp index 3e0501b84e..2559b3a46b 100644 --- a/core/src/db/DBImpl.cpp +++ b/core/src/db/DBImpl.cpp @@ -179,9 +179,10 @@ DBImpl::PreloadTable(const std::string& table_id) { } // get all table files from parent table + meta::DatesT dates; std::vector ids; meta::TableFilesSchema files_array; - auto status = GetFilesToSearch(table_id, ids, files_array); + auto status = GetFilesToSearch(table_id, ids, dates, files_array); if (!status.ok()) { return status; } @@ -190,7 +191,7 @@ DBImpl::PreloadTable(const std::string& table_id) { std::vector partiton_array; status = meta_ptr_->ShowPartitions(table_id, partiton_array); for (auto& schema : partiton_array) { - status = GetFilesToSearch(schema.table_id_, ids, files_array); + status = GetFilesToSearch(schema.table_id_, ids, dates, files_array); } int64_t size = 0; @@ -304,6 +305,10 @@ DBImpl::InsertVectors(const std::string& table_id, const std::string& partition_ if (!partition_tag.empty()) { std::string partition_name; status = meta_ptr_->GetPartitionName(table_id, partition_tag, target_table_name); + if (!status.ok()) { + ENGINE_LOG_ERROR << status.message(); + return status; + } } // insert vectors into target table @@ -400,7 +405,7 @@ DBImpl::Query(const std::string& table_id, const std::vector& parti if (partition_tags.empty()) { // no partition tag specified, means search in whole table 
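// NOTE (editor, illustrative sketch -- not part of this change): after this
// refactor every call site passes the date filter explicitly, e.g.
//
//   meta::DatesT dates;                    // empty set == no date filter
//   std::vector<size_t> ids;               // element type assumed here
//   meta::TableFilesSchema files_array;
//   auto status = GetFilesToSearch(table_id, ids, dates, files_array);
//
// An empty DatesT is assumed to preserve the old behavior, where the helper
// default-constructed the dates vector internally.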
// get all table files from parent table - status = GetFilesToSearch(table_id, ids, files_array); + status = GetFilesToSearch(table_id, ids, dates, files_array); if (!status.ok()) { return status; } @@ -408,7 +413,7 @@ DBImpl::Query(const std::string& table_id, const std::vector& parti std::vector partiton_array; status = meta_ptr_->ShowPartitions(table_id, partiton_array); for (auto& schema : partiton_array) { - status = GetFilesToSearch(schema.table_id_, ids, files_array); + status = GetFilesToSearch(schema.table_id_, ids, dates, files_array); } } else { // get files from specified partitions @@ -416,7 +421,7 @@ DBImpl::Query(const std::string& table_id, const std::vector& parti GetPartitionsByTags(table_id, partition_tags, partition_name_array); for (auto& partition_name : partition_name_array) { - status = GetFilesToSearch(partition_name, ids, files_array); + status = GetFilesToSearch(partition_name, ids, dates, files_array); } } @@ -446,7 +451,7 @@ DBImpl::QueryByFileID(const std::string& table_id, const std::vector lck(compact_result_mutex_); if (compact_thread_results_.empty()) { + // collect merge files for all tables(if compact_table_ids_ is empty) for two reasons: + // 1. other tables may still has un-merged files + // 2. server may be closed unexpected, these un-merge files need to be merged when server restart + if (compact_table_ids_.empty()) { + std::vector table_schema_array; + meta_ptr_->AllTables(table_schema_array); + for (auto& schema : table_schema_array) { + compact_table_ids_.insert(schema.table_id_); + } + } + + // start merge file thread compact_thread_results_.push_back( compact_thread_pool_.enqueue(&DBImpl::BackgroundCompaction, this, compact_table_ids_)); compact_table_ids_.clear(); @@ -717,7 +734,7 @@ DBImpl::BackgroundMergeFiles(const std::string& table_id) { for (auto& kv : raw_files) { auto files = kv.second; if (files.size() < options_.merge_trigger_number_) { - ENGINE_LOG_DEBUG << "Files number not greater equal than merge trigger number, skip merge action"; + ENGINE_LOG_TRACE << "Files number not greater equal than merge trigger number, skip merge action"; continue; } @@ -734,7 +751,7 @@ DBImpl::BackgroundMergeFiles(const std::string& table_id) { void DBImpl::BackgroundCompaction(std::set table_ids) { - ENGINE_LOG_TRACE << " Background compaction thread start"; + ENGINE_LOG_TRACE << "Background compaction thread start"; Status status; for (auto& table_id : table_ids) { @@ -757,7 +774,7 @@ DBImpl::BackgroundCompaction(std::set table_ids) { } meta_ptr_->CleanUpFilesWithTTL(ttl); - ENGINE_LOG_TRACE << " Background compaction thread exit"; + ENGINE_LOG_TRACE << "Background compaction thread exit"; } void @@ -817,9 +834,8 @@ DBImpl::BackgroundBuildIndex() { } Status -DBImpl::GetFilesToSearch(const std::string& table_id, const std::vector& file_ids, +DBImpl::GetFilesToSearch(const std::string& table_id, const std::vector& file_ids, const meta::DatesT& dates, meta::TableFilesSchema& files) { - meta::DatesT dates; meta::DatePartionedTableFilesSchema date_files; auto status = meta_ptr_->FilesToSearch(table_id, file_ids, dates, date_files); if (!status.ok()) { diff --git a/core/src/db/DBImpl.h b/core/src/db/DBImpl.h index 932fc990e4..a0c5cc356d 100644 --- a/core/src/db/DBImpl.h +++ b/core/src/db/DBImpl.h @@ -153,7 +153,8 @@ class DBImpl : public DB { MemSerialize(); Status - GetFilesToSearch(const std::string& table_id, const std::vector& file_ids, meta::TableFilesSchema& files); + GetFilesToSearch(const std::string& table_id, const std::vector& file_ids, const 
meta::DatesT& dates, + meta::TableFilesSchema& files); Status GetPartitionsByTags(const std::string& table_id, const std::vector& partition_tags, diff --git a/core/src/db/meta/MySQLMetaImpl.cpp b/core/src/db/meta/MySQLMetaImpl.cpp index ff36554c10..bf83447806 100644 --- a/core/src/db/meta/MySQLMetaImpl.cpp +++ b/core/src/db/meta/MySQLMetaImpl.cpp @@ -1392,6 +1392,7 @@ MySQLMetaImpl::FilesToMerge(const std::string& table_id, DatePartionedTableFiles } // Scoped Connection Status ret; + int64_t to_merge_files = 0; for (auto& resRow : res) { TableFileSchema table_file; table_file.file_size_ = resRow["file_size"]; @@ -1420,13 +1421,14 @@ MySQLMetaImpl::FilesToMerge(const std::string& table_id, DatePartionedTableFiles auto dateItr = files.find(table_file.date_); if (dateItr == files.end()) { files[table_file.date_] = TableFilesSchema(); + to_merge_files++; } files[table_file.date_].push_back(table_file); } - if (res.size() > 0) { - ENGINE_LOG_DEBUG << "Collect " << res.size() << " to-merge files"; + if (to_merge_files > 0) { + ENGINE_LOG_TRACE << "Collect " << to_merge_files << " to-merge files"; } return ret; } catch (std::exception& e) { @@ -1809,6 +1811,7 @@ MySQLMetaImpl::CleanUpFilesWithTTL(uint16_t seconds) { mysqlpp::StoreQueryResult res = cleanUpFilesWithTTLQuery.store(); + int64_t remove_tables = 0; if (!res.empty()) { std::stringstream idsToDeleteSS; for (auto& resRow : res) { @@ -1817,7 +1820,7 @@ MySQLMetaImpl::CleanUpFilesWithTTL(uint16_t seconds) { resRow["table_id"].to_string(table_id); utils::DeleteTablePath(options_, table_id, false); // only delete empty folder - + remove_tables++; idsToDeleteSS << "id = " << std::to_string(id) << " OR "; } std::string idsToDeleteStr = idsToDeleteSS.str(); @@ -1832,8 +1835,8 @@ MySQLMetaImpl::CleanUpFilesWithTTL(uint16_t seconds) { } } - if (res.size() > 0) { - ENGINE_LOG_DEBUG << "Remove " << res.size() << " tables from meta"; + if (remove_tables > 0) { + ENGINE_LOG_DEBUG << "Remove " << remove_tables << " tables from meta"; } } // Scoped Connection } catch (std::exception& e) { diff --git a/core/src/db/meta/SqliteMetaImpl.cpp b/core/src/db/meta/SqliteMetaImpl.cpp index 6221dd8ac1..22e953fe9d 100644 --- a/core/src/db/meta/SqliteMetaImpl.cpp +++ b/core/src/db/meta/SqliteMetaImpl.cpp @@ -971,6 +971,7 @@ SqliteMetaImpl::FilesToMerge(const std::string& table_id, DatePartionedTableFile order_by(&TableFileSchema::file_size_).desc()); Status result; + int64_t to_merge_files = 0; for (auto& file : selected) { TableFileSchema table_file; table_file.file_size_ = std::get<4>(file); @@ -999,11 +1000,13 @@ SqliteMetaImpl::FilesToMerge(const std::string& table_id, DatePartionedTableFile if (dateItr == files.end()) { files[table_file.date_] = TableFilesSchema(); } + files[table_file.date_].push_back(table_file); + to_merge_files++; } - if (selected.size() > 0) { - ENGINE_LOG_DEBUG << "Collect " << selected.size() << " to-merge files"; + if (to_merge_files > 0) { + ENGINE_LOG_TRACE << "Collect " << to_merge_files << " to-merge files"; } return result; } catch (std::exception& e) { @@ -1313,16 +1316,18 @@ SqliteMetaImpl::CleanUpFilesWithTTL(uint16_t seconds) { try { server::MetricCollector metric; + int64_t remove_tables = 0; for (auto& table_id : table_ids) { auto selected = ConnectorPtr->select(columns(&TableFileSchema::file_id_), where(c(&TableFileSchema::table_id_) == table_id)); if (selected.size() == 0) { utils::DeleteTablePath(options_, table_id); + remove_tables++; } } - if (table_ids.size() > 0) { - ENGINE_LOG_DEBUG << "Remove " << table_ids.size() << 
" tables folder"; + if (remove_tables) { + ENGINE_LOG_DEBUG << "Remove " << remove_tables << " tables folder"; } } catch (std::exception& e) { return HandleException("Encounter exception when delete table folder", e.what()); diff --git a/core/src/index/knowhere/knowhere/adapter/SptagAdapter.cpp b/core/src/index/knowhere/knowhere/adapter/SptagAdapter.cpp index b4c3910a01..db4a415261 100644 --- a/core/src/index/knowhere/knowhere/adapter/SptagAdapter.cpp +++ b/core/src/index/knowhere/knowhere/adapter/SptagAdapter.cpp @@ -89,34 +89,35 @@ ConvertToDataset(std::vector query_results) { } } - auto id_buf = MakeMutableBufferSmart((uint8_t*)p_id, sizeof(int64_t) * elems); - auto dist_buf = MakeMutableBufferSmart((uint8_t*)p_dist, sizeof(float) * elems); - - // TODO: magic - std::vector id_bufs{nullptr, id_buf}; - std::vector dist_bufs{nullptr, dist_buf}; - - auto int64_type = std::make_shared(); - auto float_type = std::make_shared(); - - auto id_array_data = arrow::ArrayData::Make(int64_type, elems, id_bufs); - auto dist_array_data = arrow::ArrayData::Make(float_type, elems, dist_bufs); - // auto id_array_data = std::make_shared(int64_type, sizeof(int64_t) * elems, id_bufs); - // auto dist_array_data = std::make_shared(float_type, sizeof(float) * elems, dist_bufs); - - // auto ids = ConstructInt64Array((uint8_t*)p_id, sizeof(int64_t) * elems); - // auto dists = ConstructFloatArray((uint8_t*)p_dist, sizeof(float) * elems); - - auto ids = std::make_shared>(id_array_data); - auto dists = std::make_shared>(dist_array_data); - std::vector array{ids, dists}; - - auto field_id = std::make_shared("id", std::make_shared()); - auto field_dist = std::make_shared("dist", std::make_shared()); - std::vector fields{field_id, field_dist}; - auto schema = std::make_shared(fields); - - return std::make_shared(array, schema); + // auto id_buf = MakeMutableBufferSmart((uint8_t*)p_id, sizeof(int64_t) * elems); + // auto dist_buf = MakeMutableBufferSmart((uint8_t*)p_dist, sizeof(float) * elems); + // + // // TODO: magic + // std::vector id_bufs{nullptr, id_buf}; + // std::vector dist_bufs{nullptr, dist_buf}; + // + // auto int64_type = std::make_shared(); + // auto float_type = std::make_shared(); + // + // auto id_array_data = arrow::ArrayData::Make(int64_type, elems, id_bufs); + // auto dist_array_data = arrow::ArrayData::Make(float_type, elems, dist_bufs); + // // auto id_array_data = std::make_shared(int64_type, sizeof(int64_t) * elems, id_bufs); + // // auto dist_array_data = std::make_shared(float_type, sizeof(float) * elems, dist_bufs); + // + // // auto ids = ConstructInt64Array((uint8_t*)p_id, sizeof(int64_t) * elems); + // // auto dists = ConstructFloatArray((uint8_t*)p_dist, sizeof(float) * elems); + // + // auto ids = std::make_shared>(id_array_data); + // auto dists = std::make_shared>(dist_array_data); + // std::vector array{ids, dists}; + // + // auto field_id = std::make_shared("id", std::make_shared()); + // auto field_dist = std::make_shared("dist", std::make_shared()); + // std::vector fields{field_id, field_dist}; + // auto schema = std::make_shared(fields); + // + // return std::make_shared(array, schema); + return std::make_shared((void*)p_id, (void*)p_dist); } } // namespace knowhere diff --git a/core/src/index/knowhere/knowhere/common/Dataset.h b/core/src/index/knowhere/knowhere/common/Dataset.h index 1331239dd6..b101aba6a7 100644 --- a/core/src/index/knowhere/knowhere/common/Dataset.h +++ b/core/src/index/knowhere/knowhere/common/Dataset.h @@ -54,6 +54,9 @@ class Dataset { : 
tensor_(std::move(tensor)), tensor_schema_(std::move(tensor_schema)) { } + Dataset(void* ids, void* dists) : ids_(ids), dists_(dists) { + } + Dataset(const Dataset&) = delete; Dataset& operator=(const Dataset&) = delete; @@ -128,6 +131,16 @@ class Dataset { tensor_schema_ = std::move(tensor_schema); } + void* + ids() { + return ids_; + } + + void* + dist() { + return dists_; + } + // const Config & // meta() const { return meta_; } @@ -141,6 +154,9 @@ class Dataset { SchemaPtr array_schema_; std::vector tensor_; SchemaPtr tensor_schema_; + // TODO(yukun): using smart pointer + void* ids_; + void* dists_; // Config meta_; }; diff --git a/core/src/index/knowhere/knowhere/index/vector_index/IndexIDMAP.cpp b/core/src/index/knowhere/knowhere/index/vector_index/IndexIDMAP.cpp index 98d25e5e5c..7aedf98613 100644 --- a/core/src/index/knowhere/knowhere/index/vector_index/IndexIDMAP.cpp +++ b/core/src/index/knowhere/knowhere/index/vector_index/IndexIDMAP.cpp @@ -80,23 +80,24 @@ IDMAP::Search(const DatasetPtr& dataset, const Config& config) { search_impl(rows, (float*)p_data, config->k, res_dis, res_ids, Config()); - auto id_buf = MakeMutableBufferSmart((uint8_t*)res_ids, sizeof(int64_t) * elems); - auto dist_buf = MakeMutableBufferSmart((uint8_t*)res_dis, sizeof(float) * elems); - - std::vector id_bufs{nullptr, id_buf}; - std::vector dist_bufs{nullptr, dist_buf}; - - auto int64_type = std::make_shared(); - auto float_type = std::make_shared(); - - auto id_array_data = arrow::ArrayData::Make(int64_type, elems, id_bufs); - auto dist_array_data = arrow::ArrayData::Make(float_type, elems, dist_bufs); - - auto ids = std::make_shared>(id_array_data); - auto dists = std::make_shared>(dist_array_data); - std::vector array{ids, dists}; - - return std::make_shared(array, nullptr); + // auto id_buf = MakeMutableBufferSmart((uint8_t*)res_ids, sizeof(int64_t) * elems); + // auto dist_buf = MakeMutableBufferSmart((uint8_t*)res_dis, sizeof(float) * elems); + // + // std::vector id_bufs{nullptr, id_buf}; + // std::vector dist_bufs{nullptr, dist_buf}; + // + // auto int64_type = std::make_shared(); + // auto float_type = std::make_shared(); + // + // auto id_array_data = arrow::ArrayData::Make(int64_type, elems, id_bufs); + // auto dist_array_data = arrow::ArrayData::Make(float_type, elems, dist_bufs); + // + // auto ids = std::make_shared>(id_array_data); + // auto dists = std::make_shared>(dist_array_data); + // std::vector array{ids, dists}; + // + // return std::make_shared(array, nullptr); + return std::make_shared((void*)res_ids, (void*)res_dis); } void diff --git a/core/src/index/knowhere/knowhere/index/vector_index/IndexIVF.cpp b/core/src/index/knowhere/knowhere/index/vector_index/IndexIVF.cpp index b2a2af29a8..7f30a97ea0 100644 --- a/core/src/index/knowhere/knowhere/index/vector_index/IndexIVF.cpp +++ b/core/src/index/knowhere/knowhere/index/vector_index/IndexIVF.cpp @@ -139,23 +139,23 @@ IVF::Search(const DatasetPtr& dataset, const Config& config) { // std::cout << ss_res_id.str() << std::endl; // std::cout << ss_res_dist.str() << std::endl << std::endl; - auto id_buf = MakeMutableBufferSmart((uint8_t*)res_ids, sizeof(int64_t) * elems); - auto dist_buf = MakeMutableBufferSmart((uint8_t*)res_dis, sizeof(float) * elems); + // auto id_buf = MakeMutableBufferSmart((uint8_t*)res_ids, sizeof(int64_t) * elems); + // auto dist_buf = MakeMutableBufferSmart((uint8_t*)res_dis, sizeof(float) * elems); + // + // std::vector id_bufs{nullptr, id_buf}; + // std::vector dist_bufs{nullptr, dist_buf}; + // + // auto 
int64_type = std::make_shared(); + // auto float_type = std::make_shared(); + // + // auto id_array_data = arrow::ArrayData::Make(int64_type, elems, id_bufs); + // auto dist_array_data = arrow::ArrayData::Make(float_type, elems, dist_bufs); + // + // auto ids = std::make_shared>(id_array_data); + // auto dists = std::make_shared>(dist_array_data); + // std::vector array{ids, dists}; - std::vector id_bufs{nullptr, id_buf}; - std::vector dist_bufs{nullptr, dist_buf}; - - auto int64_type = std::make_shared(); - auto float_type = std::make_shared(); - - auto id_array_data = arrow::ArrayData::Make(int64_type, elems, id_bufs); - auto dist_array_data = arrow::ArrayData::Make(float_type, elems, dist_bufs); - - auto ids = std::make_shared>(id_array_data); - auto dists = std::make_shared>(dist_array_data); - std::vector array{ids, dists}; - - return std::make_shared(array, nullptr); + return std::make_shared((void*)res_ids, (void*)res_dis); } void diff --git a/core/src/index/knowhere/knowhere/index/vector_index/IndexNSG.cpp b/core/src/index/knowhere/knowhere/index/vector_index/IndexNSG.cpp index 8f6d93d7ff..204819517a 100644 --- a/core/src/index/knowhere/knowhere/index/vector_index/IndexNSG.cpp +++ b/core/src/index/knowhere/knowhere/index/vector_index/IndexNSG.cpp @@ -88,23 +88,24 @@ NSG::Search(const DatasetPtr& dataset, const Config& config) { s_params.search_length = build_cfg->search_length; index_->Search((float*)p_data, rows, dim, build_cfg->k, res_dis, res_ids, s_params); - auto id_buf = MakeMutableBufferSmart((uint8_t*)res_ids, sizeof(int64_t) * elems); - auto dist_buf = MakeMutableBufferSmart((uint8_t*)res_dis, sizeof(float) * elems); + // auto id_buf = MakeMutableBufferSmart((uint8_t*)res_ids, sizeof(int64_t) * elems); + // auto dist_buf = MakeMutableBufferSmart((uint8_t*)res_dis, sizeof(float) * elems); - std::vector id_bufs{nullptr, id_buf}; - std::vector dist_bufs{nullptr, dist_buf}; - - auto int64_type = std::make_shared(); - auto float_type = std::make_shared(); - - auto id_array_data = arrow::ArrayData::Make(int64_type, elems, id_bufs); - auto dist_array_data = arrow::ArrayData::Make(float_type, elems, dist_bufs); - - auto ids = std::make_shared>(id_array_data); - auto dists = std::make_shared>(dist_array_data); - std::vector array{ids, dists}; - - return std::make_shared(array, nullptr); + // std::vector id_bufs{nullptr, id_buf}; + // std::vector dist_bufs{nullptr, dist_buf}; + // + // auto int64_type = std::make_shared(); + // auto float_type = std::make_shared(); + // + // auto id_array_data = arrow::ArrayData::Make(int64_type, elems, id_bufs); + // auto dist_array_data = arrow::ArrayData::Make(float_type, elems, dist_bufs); + // + // auto ids = std::make_shared>(id_array_data); + // auto dists = std::make_shared>(dist_array_data); + // std::vector array{ids, dists}; + // + // return std::make_shared(array, nullptr); + return std::make_shared((void*)res_ids, (void*)res_dis); } IndexModelPtr diff --git a/core/src/index/unittest/test_ivf.cpp b/core/src/index/unittest/test_ivf.cpp index ae1034b9e0..5cb820df95 100644 --- a/core/src/index/unittest/test_ivf.cpp +++ b/core/src/index/unittest/test_ivf.cpp @@ -181,11 +181,13 @@ TEST_P(IVFTest, clone_test) { // PrintResult(result, nq, k); auto AssertEqual = [&](knowhere::DatasetPtr p1, knowhere::DatasetPtr p2) { - auto ids_p1 = p1->array()[0]; - auto ids_p2 = p2->array()[0]; + auto ids_p1 = p1->ids(); + auto ids_p2 = p2->ids(); for (int i = 0; i < nq * k; ++i) { - EXPECT_EQ(*(ids_p2->data()->GetValues(1, i)), *(ids_p1->data()->GetValues(1, 
i))); + EXPECT_EQ(*((int64_t*)(ids_p2) + i), *((int64_t*)(ids_p1) + i)); + // EXPECT_EQ(*(ids_p2->data()->GetValues(1, i)), *(ids_p1->data()->GetValues(1, + // i))); } }; diff --git a/core/src/index/unittest/test_kdt.cpp b/core/src/index/unittest/test_kdt.cpp index 5400881875..bbc7dcf94c 100644 --- a/core/src/index/unittest/test_kdt.cpp +++ b/core/src/index/unittest/test_kdt.cpp @@ -66,15 +66,19 @@ TEST_F(KDTTest, kdt_basic) { AssertAnns(result, nq, k); { - auto ids = result->array()[0]; - auto dists = result->array()[1]; + // auto ids = result->array()[0]; + // auto dists = result->array()[1]; + auto ids = result->ids(); + auto dists = result->dist(); std::stringstream ss_id; std::stringstream ss_dist; for (auto i = 0; i < nq; i++) { for (auto j = 0; j < k; ++j) { - ss_id << *ids->data()->GetValues(1, i * k + j) << " "; - ss_dist << *dists->data()->GetValues(1, i * k + j) << " "; + ss_id << *((int64_t*)(ids) + i * k + j) << " "; + ss_dist << *((float*)(dists) + i * k + j) << " "; + // ss_id << *ids->data()->GetValues(1, i * k + j) << " "; + // ss_dist << *dists->data()->GetValues(1, i * k + j) << " "; } ss_id << std::endl; ss_dist << std::endl; diff --git a/core/src/index/unittest/utils.cpp b/core/src/index/unittest/utils.cpp index d4a59bafbb..2556b60fad 100644 --- a/core/src/index/unittest/utils.cpp +++ b/core/src/index/unittest/utils.cpp @@ -151,9 +151,10 @@ generate_query_dataset(int64_t nb, int64_t dim, float* xb) { void AssertAnns(const knowhere::DatasetPtr& result, const int& nq, const int& k) { - auto ids = result->array()[0]; + auto ids = result->ids(); for (auto i = 0; i < nq; i++) { - EXPECT_EQ(i, *(ids->data()->GetValues(1, i * k))); + EXPECT_EQ(i, *((int64_t*)(ids) + i * k)); + // EXPECT_EQ(i, *(ids->data()->GetValues(1, i * k))); } } diff --git a/core/src/metrics/Metrics.cpp b/core/src/metrics/Metrics.cpp index 51db5555b8..5fd3553cdc 100644 --- a/core/src/metrics/Metrics.cpp +++ b/core/src/metrics/Metrics.cpp @@ -16,8 +16,10 @@ // under the License. #include "metrics/Metrics.h" -#include "PrometheusMetrics.h" #include "server/Config.h" +#ifdef MILVUS_WITH_PROMETHEUS +#include "metrics/prometheus/PrometheusMetrics.h" +#endif #include @@ -37,11 +39,15 @@ Metrics::CreateMetricsCollector() { config.GetMetricConfigCollector(collector_type_str); +#ifdef MILVUS_WITH_PROMETHEUS if (collector_type_str == "prometheus") { return PrometheusMetrics::GetInstance(); } else { return MetricsBase::GetInstance(); } +#else + return MetricsBase::GetInstance(); +#endif } } // namespace server diff --git a/core/src/metrics/PrometheusMetrics.cpp b/core/src/metrics/prometheus/PrometheusMetrics.cpp similarity index 98% rename from core/src/metrics/PrometheusMetrics.cpp rename to core/src/metrics/prometheus/PrometheusMetrics.cpp index 770b34dc47..19b2683280 100644 --- a/core/src/metrics/PrometheusMetrics.cpp +++ b/core/src/metrics/prometheus/PrometheusMetrics.cpp @@ -15,9 +15,9 @@ // specific language governing permissions and limitations // under the License. 
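// NOTE (editor, illustrative -- mirrors the guard added in Metrics.cpp
// above): prometheus sources now live under metrics/prometheus/ and are
// compiled only when MILVUS_WITH_PROMETHEUS is defined, so consumers follow
// the pattern
//
//   #ifdef MILVUS_WITH_PROMETHEUS
//   #include "metrics/prometheus/PrometheusMetrics.h"
//   #endif
//
// and fall back to MetricsBase::GetInstance() when the macro is absent.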
-#include "metrics/PrometheusMetrics.h" -#include "SystemInfo.h" +#include "metrics/prometheus/PrometheusMetrics.h" #include "cache/GpuCacheMgr.h" +#include "metrics/SystemInfo.h" #include "server/Config.h" #include "utils/Log.h" diff --git a/core/src/metrics/PrometheusMetrics.h b/core/src/metrics/prometheus/PrometheusMetrics.h similarity index 99% rename from core/src/metrics/PrometheusMetrics.h rename to core/src/metrics/prometheus/PrometheusMetrics.h index ef60f9a231..5a452ca02c 100644 --- a/core/src/metrics/PrometheusMetrics.h +++ b/core/src/metrics/prometheus/PrometheusMetrics.h @@ -24,7 +24,7 @@ #include #include -#include "MetricBase.h" +#include "metrics/MetricBase.h" #include "utils/Error.h" #define METRICS_NOW_TIME std::chrono::system_clock::now() diff --git a/core/src/wrapper/VecImpl.cpp b/core/src/wrapper/VecImpl.cpp index 05293b53f1..e7967cbf59 100644 --- a/core/src/wrapper/VecImpl.cpp +++ b/core/src/wrapper/VecImpl.cpp @@ -84,8 +84,8 @@ VecIndexImpl::Search(const int64_t& nq, const float* xq, float* dist, int64_t* i Config search_cfg = cfg; auto res = index_->Search(dataset, search_cfg); - auto ids_array = res->array()[0]; - auto dis_array = res->array()[1]; + // auto ids_array = res->array()[0]; + // auto dis_array = res->array()[1]; //{ // auto& ids = ids_array; @@ -104,12 +104,14 @@ VecIndexImpl::Search(const int64_t& nq, const float* xq, float* dist, int64_t* i // std::cout << "dist\n" << ss_dist.str() << std::endl; //} - auto p_ids = ids_array->data()->GetValues(1, 0); - auto p_dist = dis_array->data()->GetValues(1, 0); + // auto p_ids = ids_array->data()->GetValues(1, 0); + // auto p_dist = dis_array->data()->GetValues(1, 0); // TODO(linxj): avoid copy here. - memcpy(ids, p_ids, sizeof(int64_t) * nq * k); - memcpy(dist, p_dist, sizeof(float) * nq * k); + memcpy(ids, res->ids(), sizeof(int64_t) * nq * k); + memcpy(dist, res->dist(), sizeof(float) * nq * k); + free(res->ids()); + free(res->dist()); } catch (knowhere::KnowhereException& e) { WRAPPER_LOG_ERROR << e.what(); return Status(KNOWHERE_UNEXPECTED_ERROR, e.what()); diff --git a/core/unittest/CMakeLists.txt b/core/unittest/CMakeLists.txt index 10ab362e77..01e1054f7e 100644 --- a/core/unittest/CMakeLists.txt +++ b/core/unittest/CMakeLists.txt @@ -110,12 +110,18 @@ set(unittest_libs pthread metrics gfortran - prometheus-cpp-pull - prometheus-cpp-push - prometheus-cpp-core - dl - z ) +if (MILVUS_WITH_PROMETHEUS) + set(unittest_libs ${unittest_libs} + prometheus-cpp-push + prometheus-cpp-pull + prometheus-cpp-core + ) +endif () +set(unittest_libs ${unittest_libs} + dl + z + ) if (MILVUS_GPU_VERSION) include_directories("${CUDA_INCLUDE_DIRS}") @@ -135,4 +141,4 @@ add_subdirectory(db) add_subdirectory(wrapper) add_subdirectory(metrics) add_subdirectory(scheduler) -add_subdirectory(server) \ No newline at end of file +add_subdirectory(server) diff --git a/core/unittest/metrics/CMakeLists.txt b/core/unittest/metrics/CMakeLists.txt index ad9d4e3943..11c25a71e4 100644 --- a/core/unittest/metrics/CMakeLists.txt +++ b/core/unittest/metrics/CMakeLists.txt @@ -18,10 +18,15 @@ #------------------------------------------------------------------------------- set(test_files - ${CMAKE_CURRENT_SOURCE_DIR}/test_metricbase.cpp - ${CMAKE_CURRENT_SOURCE_DIR}/test_metrics.cpp - ${CMAKE_CURRENT_SOURCE_DIR}/test_prometheus.cpp - ${CMAKE_CURRENT_SOURCE_DIR}/utils.cpp) + test_metricbase.cpp + test_metrics.cpp + utils.cpp + ) + +if (MILVUS_WITH_PROMETHEUS) + set(test_files ${test_files} + test_prometheus.cpp) +endif () 
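# NOTE (editor, illustrative): the append-when-enabled pattern above is the
# same one used for the engine sources and link libraries elsewhere in this
# change, e.g. in core/unittest/CMakeLists.txt:
#
#   if (MILVUS_WITH_PROMETHEUS)
#       set(unittest_libs ${unittest_libs}
#           prometheus-cpp-push
#           prometheus-cpp-pull
#           prometheus-cpp-core
#           )
#   endif ()
#
# so a build configured without prometheus never references the
# prometheus-cpp targets.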
add_executable(test_metrics ${common_files} diff --git a/core/unittest/metrics/test_prometheus.cpp b/core/unittest/metrics/test_prometheus.cpp index 50e845d1c2..6e339b73b4 100644 --- a/core/unittest/metrics/test_prometheus.cpp +++ b/core/unittest/metrics/test_prometheus.cpp @@ -15,8 +15,8 @@ // specific language governing permissions and limitations // under the License. -#include "metrics/PrometheusMetrics.h" #include "server/Config.h" +#include "metrics/prometheus/PrometheusMetrics.h" #include #include diff --git a/docker/build_env/cpu/ubuntu16.04/Dockerfile b/docker/build_env/cpu/ubuntu16.04/Dockerfile new file mode 100644 index 0000000000..45e2b53938 --- /dev/null +++ b/docker/build_env/cpu/ubuntu16.04/Dockerfile @@ -0,0 +1,29 @@ +FROM ubuntu:16.04 + +ENV DEBIAN_FRONTEND noninteractive + +RUN apt-get update && apt-get install -y --no-install-recommends wget ca-certificates gnupg2 apt-transport-https && \ + wget -P /tmp https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB && \ + apt-key add /tmp/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB && \ + sh -c 'echo deb https://apt.repos.intel.com/mkl all main > /etc/apt/sources.list.d/intel-mkl.list' && \ + wget -qO- "https://cmake.org/files/v3.14/cmake-3.14.3-Linux-x86_64.tar.gz" | tar --strip-components=1 -xz -C /usr/local && \ + apt-get update && apt-get install -y --no-install-recommends \ + g++ git gfortran lsb-core \ + libboost-serialization-dev libboost-filesystem-dev libboost-system-dev libboost-regex-dev \ + curl libtool automake libssl-dev pkg-config libcurl4-openssl-dev python3-pip \ + clang-format-6.0 clang-tidy-6.0 \ + lcov mysql-client libmysqlclient-dev intel-mkl-gnu-2019.5-281 intel-mkl-core-2019.5-281 && \ + apt-get remove --purge -y && \ + rm -rf /var/lib/apt/lists/* + +RUN ln -s /usr/lib/x86_64-linux-gnu/libmysqlclient.so \ + /usr/lib/x86_64-linux-gnu/libmysqlclient_r.so + +RUN sh -c 'echo export LD_LIBRARY_PATH=/opt/intel/compilers_and_libraries_2019.5.281/linux/mkl/lib/intel64:\$LD_LIBRARY_PATH > /etc/profile.d/mkl.sh' + +COPY docker-entrypoint.sh /app/docker-entrypoint.sh + +WORKDIR /opt/milvus + +ENTRYPOINT [ "/app/docker-entrypoint.sh" ] +CMD [ "start" ] diff --git a/docker/build_env/ubuntu16.04/docker-entrypoint.sh b/docker/build_env/cpu/ubuntu16.04/docker-entrypoint.sh similarity index 100% rename from docker/build_env/ubuntu16.04/docker-entrypoint.sh rename to docker/build_env/cpu/ubuntu16.04/docker-entrypoint.sh diff --git a/docker/build_env/cpu/ubuntu18.04/Dockerfile b/docker/build_env/cpu/ubuntu18.04/Dockerfile new file mode 100644 index 0000000000..7c76e2ec7a --- /dev/null +++ b/docker/build_env/cpu/ubuntu18.04/Dockerfile @@ -0,0 +1,29 @@ +FROM ubuntu:18.04 + +ENV DEBIAN_FRONTEND noninteractive + +RUN apt-get update && apt-get install -y --no-install-recommends wget ca-certificates gnupg2 && \ + wget -P /tmp https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB && \ + apt-key add /tmp/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB && \ + sh -c 'echo deb https://apt.repos.intel.com/mkl all main > /etc/apt/sources.list.d/intel-mkl.list' && \ + wget -qO- "https://cmake.org/files/v3.14/cmake-3.14.3-Linux-x86_64.tar.gz" | tar --strip-components=1 -xz -C /usr/local && \ + apt-get update && apt-get install -y --no-install-recommends \ + g++ git gfortran lsb-core \ + libboost-serialization-dev libboost-filesystem-dev libboost-system-dev libboost-regex-dev \ + curl libtool automake libssl-dev pkg-config libcurl4-openssl-dev python3-pip \ + clang-format-6.0 clang-tidy-6.0 \ + lcov 
diff --git a/docker/build_env/cpu/ubuntu18.04/Dockerfile b/docker/build_env/cpu/ubuntu18.04/Dockerfile
new file mode 100644
index 0000000000..7c76e2ec7a
--- /dev/null
+++ b/docker/build_env/cpu/ubuntu18.04/Dockerfile
@@ -0,0 +1,29 @@
+FROM ubuntu:18.04
+
+ENV DEBIAN_FRONTEND noninteractive
+
+RUN apt-get update && apt-get install -y --no-install-recommends wget ca-certificates gnupg2 && \
+    wget -P /tmp https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB && \
+    apt-key add /tmp/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB && \
+    sh -c 'echo deb https://apt.repos.intel.com/mkl all main > /etc/apt/sources.list.d/intel-mkl.list' && \
+    wget -qO- "https://cmake.org/files/v3.14/cmake-3.14.3-Linux-x86_64.tar.gz" | tar --strip-components=1 -xz -C /usr/local && \
+    apt-get update && apt-get install -y --no-install-recommends \
+    g++ git gfortran lsb-core \
+    libboost-serialization-dev libboost-filesystem-dev libboost-system-dev libboost-regex-dev \
+    curl libtool automake libssl-dev pkg-config libcurl4-openssl-dev python3-pip \
+    clang-format-6.0 clang-tidy-6.0 \
+    lcov mysql-client libmysqlclient-dev intel-mkl-gnu-2019.5-281 intel-mkl-core-2019.5-281 && \
+    apt-get remove --purge -y && \
+    rm -rf /var/lib/apt/lists/*
+
+RUN ln -s /usr/lib/x86_64-linux-gnu/libmysqlclient.so \
+    /usr/lib/x86_64-linux-gnu/libmysqlclient_r.so
+
+RUN sh -c 'echo export LD_LIBRARY_PATH=/opt/intel/compilers_and_libraries_2019.5.281/linux/mkl/lib/intel64:\$LD_LIBRARY_PATH > /etc/profile.d/mkl.sh'
+
+COPY docker-entrypoint.sh /app/docker-entrypoint.sh
+
+WORKDIR /opt/milvus
+
+ENTRYPOINT [ "/app/docker-entrypoint.sh" ]
+CMD [ "start" ]
diff --git a/docker/build_env/ubuntu18.04/docker-entrypoint.sh b/docker/build_env/cpu/ubuntu18.04/docker-entrypoint.sh
similarity index 100%
rename from docker/build_env/ubuntu18.04/docker-entrypoint.sh
rename to docker/build_env/cpu/ubuntu18.04/docker-entrypoint.sh
diff --git a/docker/build_env/ubuntu16.04/Dockerfile b/docker/build_env/gpu/ubuntu16.04/Dockerfile
similarity index 86%
rename from docker/build_env/ubuntu16.04/Dockerfile
rename to docker/build_env/gpu/ubuntu16.04/Dockerfile
index a93ce83f72..d35a7dccfd 100644
--- a/docker/build_env/ubuntu16.04/Dockerfile
+++ b/docker/build_env/gpu/ubuntu16.04/Dockerfile
@@ -11,15 +11,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends wget && \
     git flex bison gfortran lsb-core \
     curl libtool automake libboost1.58-all-dev libssl-dev pkg-config libcurl4-openssl-dev python3-pip \
     clang-format-6.0 clang-tidy-6.0 \
-    lcov mysql-client libmysqlclient-dev intel-mkl-gnu-2019.4-243 intel-mkl-core-2019.4-243 && \
+    lcov mysql-client libmysqlclient-dev intel-mkl-gnu-2019.5-281 intel-mkl-core-2019.5-281 && \
     apt-get remove --purge -y && \
     rm -rf /var/lib/apt/lists/*
 
 RUN ln -s /usr/lib/x86_64-linux-gnu/libmysqlclient.so /usr/lib/x86_64-linux-gnu/libmysqlclient_r.so
 
-RUN sh -c 'echo export LD_LIBRARY_PATH=/opt/intel/compilers_and_libraries_2019.4.243/linux/mkl/lib/intel64:\$LD_LIBRARY_PATH > /etc/profile.d/mkl.sh'
+RUN sh -c 'echo export LD_LIBRARY_PATH=/opt/intel/compilers_and_libraries_2019.5.281/linux/mkl/lib/intel64:\$LD_LIBRARY_PATH > /etc/profile.d/mkl.sh'
 
 COPY docker-entrypoint.sh /app/docker-entrypoint.sh
+
+WORKDIR /opt/milvus
+
 ENTRYPOINT [ "/app/docker-entrypoint.sh" ]
 CMD [ "start" ]
-
diff --git a/docker/build_env/gpu/ubuntu16.04/docker-entrypoint.sh b/docker/build_env/gpu/ubuntu16.04/docker-entrypoint.sh
new file mode 100755
index 0000000000..1e85e7e9e1
--- /dev/null
+++ b/docker/build_env/gpu/ubuntu16.04/docker-entrypoint.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+set -e
+
+if [ "$1" = 'start' ]; then
+    tail -f /dev/null
+fi
+
+exec "$@"
+
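Note: in the build-env entrypoint above, the `start` command (also the image's default `CMD`) blocks on `tail -f /dev/null`, which keeps the container alive as a long-lived build box; any other argument is exec'd verbatim. A usage sketch, again with a hypothetical local tag:

```shell
# "start" keeps the container idling on tail -f /dev/null...
$ docker run -d --name milvus-build milvus-gpu-build-env:ubuntu16.04
# ...so a shell can then be attached to it for compiling Milvus.
$ docker exec -it milvus-build bash
```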
diff --git a/docker/build_env/ubuntu18.04/Dockerfile b/docker/build_env/gpu/ubuntu18.04/Dockerfile
similarity index 86%
rename from docker/build_env/ubuntu18.04/Dockerfile
rename to docker/build_env/gpu/ubuntu18.04/Dockerfile
index 7f7353de31..9f2f3f55ac 100644
--- a/docker/build_env/ubuntu18.04/Dockerfile
+++ b/docker/build_env/gpu/ubuntu18.04/Dockerfile
@@ -11,15 +11,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends wget && \
     git flex bison gfortran lsb-core \
     curl libtool automake libboost-all-dev libssl-dev pkg-config libcurl4-openssl-dev python3-pip \
     clang-format-6.0 clang-tidy-6.0 \
-    lcov mysql-client libmysqlclient-dev intel-mkl-gnu-2019.4-243 intel-mkl-core-2019.4-243 && \
+    lcov mysql-client libmysqlclient-dev intel-mkl-gnu-2019.5-281 intel-mkl-core-2019.5-281 && \
     apt-get remove --purge -y && \
     rm -rf /var/lib/apt/lists/*
 
 RUN ln -s /usr/lib/x86_64-linux-gnu/libmysqlclient.so /usr/lib/x86_64-linux-gnu/libmysqlclient_r.so
 
-RUN sh -c 'echo export LD_LIBRARY_PATH=/opt/intel/compilers_and_libraries_2019.4.243/linux/mkl/lib/intel64:\$LD_LIBRARY_PATH > /etc/profile.d/mkl.sh'
+RUN sh -c 'echo export LD_LIBRARY_PATH=/opt/intel/compilers_and_libraries_2019.5.281/linux/mkl/lib/intel64:\$LD_LIBRARY_PATH > /etc/profile.d/mkl.sh'
 
 COPY docker-entrypoint.sh /app/docker-entrypoint.sh
+
+WORKDIR /opt/milvus
+
 ENTRYPOINT [ "/app/docker-entrypoint.sh" ]
 CMD [ "start" ]
-
diff --git a/docker/build_env/gpu/ubuntu18.04/docker-entrypoint.sh b/docker/build_env/gpu/ubuntu18.04/docker-entrypoint.sh
new file mode 100755
index 0000000000..1e85e7e9e1
--- /dev/null
+++ b/docker/build_env/gpu/ubuntu18.04/docker-entrypoint.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+set -e
+
+if [ "$1" = 'start' ]; then
+    tail -f /dev/null
+fi
+
+exec "$@"
+
diff --git a/docker/deploy/cpu/ubuntu16.04/Dockerfile b/docker/deploy/cpu/ubuntu16.04/Dockerfile
new file mode 100644
index 0000000000..9e90c6a89c
--- /dev/null
+++ b/docker/deploy/cpu/ubuntu16.04/Dockerfile
@@ -0,0 +1,20 @@
+FROM ubuntu:16.04
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    gfortran libsqlite3-dev libmysqlclient-dev libcurl4-openssl-dev python3 && \
+    apt-get remove --purge -y && \
+    rm -rf /var/lib/apt/lists/*
+
+RUN ln -s /usr/lib/x86_64-linux-gnu/libmysqlclient.so /usr/lib/x86_64-linux-gnu/libmysqlclient_r.so
+
+COPY ./docker-entrypoint.sh /opt
+COPY ./milvus /opt/milvus
+ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/opt/milvus/lib"
+
+WORKDIR /opt/milvus
+
+ENTRYPOINT [ "/opt/docker-entrypoint.sh" ]
+
+CMD [ "start" ]
+
+EXPOSE 19530
diff --git a/docker/deploy/ubuntu16.04/docker-entrypoint.sh b/docker/deploy/cpu/ubuntu16.04/docker-entrypoint.sh
similarity index 99%
rename from docker/deploy/ubuntu16.04/docker-entrypoint.sh
rename to docker/deploy/cpu/ubuntu16.04/docker-entrypoint.sh
index 446c174d74..12937df395 100755
--- a/docker/deploy/ubuntu16.04/docker-entrypoint.sh
+++ b/docker/deploy/cpu/ubuntu16.04/docker-entrypoint.sh
@@ -7,4 +7,3 @@ if [ "$1" == 'start' ]; then
 fi
 
 exec "$@"
-
diff --git a/docker/deploy/cpu/ubuntu18.04/Dockerfile b/docker/deploy/cpu/ubuntu18.04/Dockerfile
new file mode 100644
index 0000000000..0064f4aad2
--- /dev/null
+++ b/docker/deploy/cpu/ubuntu18.04/Dockerfile
@@ -0,0 +1,21 @@
+FROM ubuntu:18.04
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    gfortran libsqlite3-dev libmysqlclient-dev libcurl4-openssl-dev python3 && \
+    apt-get remove --purge -y && \
+    rm -rf /var/lib/apt/lists/*
+
+RUN ln -s /usr/lib/x86_64-linux-gnu/libmysqlclient.so /usr/lib/x86_64-linux-gnu/libmysqlclient_r.so
+
+COPY ./docker-entrypoint.sh /opt
+COPY ./milvus /opt/milvus
+ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/opt/milvus/lib"
+
+WORKDIR /opt/milvus
+
+ENTRYPOINT [ "/opt/docker-entrypoint.sh" ]
+
+CMD [ "start" ]
+
+EXPOSE 19530
+
diff --git a/docker/deploy/ubuntu18.04/docker-entrypoint.sh b/docker/deploy/cpu/ubuntu18.04/docker-entrypoint.sh
similarity index 99%
rename from docker/deploy/ubuntu18.04/docker-entrypoint.sh
rename to docker/deploy/cpu/ubuntu18.04/docker-entrypoint.sh
index 446c174d74..12937df395 100755
--- a/docker/deploy/ubuntu18.04/docker-entrypoint.sh
+++ b/docker/deploy/cpu/ubuntu18.04/docker-entrypoint.sh
@@ -7,4 +7,3 @@ if [ "$1" == 'start' ]; then
 fi
 
 exec "$@"
-
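Note: every deploy image now sets `WORKDIR /opt/milvus` and exposes 19530, the Milvus service port; its entrypoint runs `scripts/start_server.sh` when invoked with `start`, the default `CMD`. A run sketch, with a hypothetical tag:

```shell
# Publish the exposed service port; the default CMD ("start") launches
# /opt/milvus/scripts/start_server.sh inside the container.
$ docker run -d --name milvus -p 19530:19530 milvus-cpu:0.6.0
```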
diff --git a/docker/deploy/ubuntu16.04/Dockerfile b/docker/deploy/gpu/ubuntu16.04/Dockerfile
similarity index 97%
rename from docker/deploy/ubuntu16.04/Dockerfile
rename to docker/deploy/gpu/ubuntu16.04/Dockerfile
index c5ca0ab03e..ee5402d3a9 100644
--- a/docker/deploy/ubuntu16.04/Dockerfile
+++ b/docker/deploy/gpu/ubuntu16.04/Dockerfile
@@ -15,6 +15,8 @@ COPY ./docker-entrypoint.sh /opt
 COPY ./milvus /opt/milvus
 ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/opt/milvus/lib"
 
+WORKDIR /opt/milvus
+
 ENTRYPOINT [ "/opt/docker-entrypoint.sh" ]
 
 CMD [ "start" ]
diff --git a/docker/deploy/gpu/ubuntu16.04/docker-entrypoint.sh b/docker/deploy/gpu/ubuntu16.04/docker-entrypoint.sh
new file mode 100755
index 0000000000..12937df395
--- /dev/null
+++ b/docker/deploy/gpu/ubuntu16.04/docker-entrypoint.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+set -e
+
+if [ "$1" == 'start' ]; then
+    cd /opt/milvus/scripts && ./start_server.sh
+fi
+
+exec "$@"
diff --git a/docker/deploy/ubuntu18.04/Dockerfile b/docker/deploy/gpu/ubuntu18.04/Dockerfile
similarity index 97%
rename from docker/deploy/ubuntu18.04/Dockerfile
rename to docker/deploy/gpu/ubuntu18.04/Dockerfile
index 0d16ae46e1..0760fe527e 100644
--- a/docker/deploy/ubuntu18.04/Dockerfile
+++ b/docker/deploy/gpu/ubuntu18.04/Dockerfile
@@ -15,6 +15,8 @@ COPY ./docker-entrypoint.sh /opt
 COPY ./milvus /opt/milvus
 ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/opt/milvus/lib"
 
+WORKDIR /opt/milvus
+
 ENTRYPOINT [ "/opt/docker-entrypoint.sh" ]
 
 CMD [ "start" ]
diff --git a/docker/deploy/gpu/ubuntu18.04/docker-entrypoint.sh b/docker/deploy/gpu/ubuntu18.04/docker-entrypoint.sh
new file mode 100755
index 0000000000..12937df395
--- /dev/null
+++ b/docker/deploy/gpu/ubuntu18.04/docker-entrypoint.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+set -e
+
+if [ "$1" == 'start' ]; then
+    cd /opt/milvus/scripts && ./start_server.sh
+fi
+
+exec "$@"
diff --git a/install.md b/install.md
index 4d2088a3be..6711b41f76 100644
--- a/install.md
+++ b/install.md
@@ -29,10 +29,15 @@ $ ./build.sh -t Release
 ```
 
 By default, it will build CPU version. To build GPU version, add `-g` option
-```
+```shell
 $ ./build.sh -g
 ```
 
+To see the complete list of build options, run
+```shell
+$ ./build.sh -h
+```
+
 When the build is completed, all the stuff that you need in order to run Milvus will be installed under `[Milvus root path]/core/milvus`.
 
 ## Launch Milvus server
@@ -43,13 +48,13 @@ $ cd [Milvus root path]/core/milvus
 
 Add `lib/` directory to `LD_LIBRARY_PATH`
 
-```
+```shell
 $ export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:[Milvus root path]/core/milvus/lib
 ```
 
 Then start Milvus server:
 
-```
+```shell
 $ cd scripts
 $ ./start_server.sh
 ```
@@ -65,7 +70,7 @@ $ ./stop_server.sh
 `protocol https not supported or disabled in libcurl`. First, make sure you have `libcurl4-openssl-dev` installed in your system. Then try reinstall CMake from source with `--system-curl` option:
 
-```
+```shell
 $ ./bootstrap --system-curl
 $ make
 $ sudo make install
diff --git a/tests/milvus-java-test/pom.xml b/tests/milvus-java-test/pom.xml
index 4da715e292..bff6f1de61 100644
--- a/tests/milvus-java-test/pom.xml
+++ b/tests/milvus-java-test/pom.xml
@@ -99,7 +99,7 @@
         <dependency>
             <groupId>io.milvus</groupId>
             <artifactId>milvus-sdk-java</artifactId>
-            <version>0.2.0-SNAPSHOT</version>
+            <version>0.3.0</version>
         </dependency>
@@ -134,4 +134,4 @@
-</project>
\ No newline at end of file
+</project>
diff --git a/tests/milvus_python_test/test_ping.py b/tests/milvus_python_test/test_ping.py
index d63ab93f11..3831dcd6ac 100644
--- a/tests/milvus_python_test/test_ping.py
+++ b/tests/milvus_python_test/test_ping.py
@@ -1,7 +1,7 @@
 import logging
 import pytest
 
-__version__ = '0.5.1'
+__version__ = '0.6.0'
 
 
 class TestPing:
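Note: as a companion to the install.md Q&A above on `protocol https not supported or disabled in libcurl`, the installed libcurl's protocol support can be verified before rebuilding CMake; `curl-config` ships with `libcurl4-openssl-dev`:

```shell
# Lists the protocols this libcurl was built with; HTTPS should appear.
$ curl-config --protocols | grep -i https
```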