diff --git a/package/package.sh b/package/package.sh
index 70e686873b3db1ef5cb3bce29c940c2682f291b0..c4768a7ff44b4b056180bd742178d8244f7c25cc 100755
--- a/package/package.sh
+++ b/package/package.sh
@@ -19,11 +19,13 @@
 package_one=ON
 strip_enable="FALSE"
 usage="Usage: ${0} -v <version> -n <ON/OFF> -s <TRUE/FALSE> -b <BRANCH> -g <ON/OFF>"
 project_dir="$(cd "$(dirname "$0")" && pwd)/.."
-build_dir=${project_dir}/build
+modules_dir=${project_dir}/modules
+storage_dir=${modules_dir}/storage
 enablesanitizer="OFF"
 static_sanitizer="OFF"
 build_type="Release"
 branch="master"
+jobs=$(nproc)

 while getopts v:n:s:b:d:t:g: opt; do
@@ -79,24 +81,35 @@ fi

 echo "current version is [ $version ], strip enable is [$strip_enable], enablesanitizer is [$enablesanitizer], static_sanitizer is [$static_sanitizer]"

-# args: <version>
-function build {
-    version=$1
-    san=$2
-    ssan=$3
-    build_type=$4
-    branch=$5
-    modules_dir=${project_dir}/modules
-    if [[ -d $build_dir ]]; then
-        rm -rf ${build_dir}/*
-    else
-        mkdir ${build_dir}
+function _build_storage {
+    if [ ! -d ${storage_dir} ]; then
+        git clone --single-branch --branch ${branch} https://github.com/vesoft-inc/nebula-storage.git ${storage_dir}
     fi
-    mkdir -p ${build_dir}
+    rm -rf ${storage_dir}/build && mkdir -p ${storage_dir}/build
+    cmake -DCMAKE_BUILD_TYPE=${build_type} \
+          -DNEBULA_BUILD_VERSION=${version} \
+          -DENABLE_ASAN=${san} \
+          -DENABLE_UBSAN=${san} \
+          -DENABLE_STATIC_ASAN=${ssan} \
+          -DENABLE_STATIC_UBSAN=${ssan} \
+          -DCMAKE_INSTALL_PREFIX=/usr/local/nebula \
+          -DNEBULA_COMMON_REPO_TAG=${branch} \
+          -DENABLE_TESTING=OFF \
+          -DENABLE_PACK_ONE=${package_one} \
+          -S ${storage_dir} \
+          -B ${storage_dir}/build

-    pushd ${build_dir}
+    if !( cmake --build ${storage_dir}/build -j ${jobs} ); then
+        echo ">>> build nebula storage failed <<<"
+        exit -1
+    fi
+    echo ">>> build nebula storage successfully <<<"
+}
+function _build_graph {
+    build_dir=${project_dir}/build
+    rm -rf ${build_dir} && mkdir -p ${build_dir}
     cmake -DCMAKE_BUILD_TYPE=${build_type} \
           -DNEBULA_BUILD_VERSION=${version} \
           -DENABLE_ASAN=${san} \
@@ -105,18 +118,31 @@ function build {
           -DENABLE_STATIC_UBSAN=${ssan} \
           -DCMAKE_INSTALL_PREFIX=/usr/local/nebula \
           -DNEBULA_COMMON_REPO_TAG=${branch} \
-          -DNEBULA_STORAGE_REPO_TAG=${branch} \
           -DENABLE_TESTING=OFF \
-          -DENABLE_BUILD_STORAGE=${build_storage} \
+          -DENABLE_BUILD_STORAGE=OFF \
           -DENABLE_PACK_ONE=${package_one} \
-          $project_dir
+          -S ${project_dir} \
+          -B ${build_dir}

-    if !( make -j$(nproc) ); then
-        echo ">>> build nebula failed <<<"
+    if !( cmake --build ${build_dir} -j ${jobs} ); then
+        echo ">>> build nebula graph failed <<<"
         exit -1
     fi
+    echo ">>> build nebula graph successfully <<<"
+}

-    popd
+# args: <version>
+function build {
+    version=$1
+    san=$2
+    ssan=$3
+    build_type=$4
+    branch=$5
+
+    if [[ "$build_storage" == "ON" ]]; then
+        _build_storage
+    fi
+    _build_graph
 }

 # args: <strip_enable>
@@ -134,6 +160,8 @@ function package {
           -DENABLE_PACK_ONE=${package_one} \
           -DCMAKE_INSTALL_PREFIX=/usr/local/nebula \
           -DENABLE_PACKAGE_STORAGE=${build_storage} \
+          -DNEBULA_STORAGE_SOURCE_DIR=${storage_dir} \
+          -DNEBULA_STORAGE_BINARY_DIR=${storage_dir}/build \
           ${project_dir}/package/

     strip_enable=$1
diff --git a/src/executor/query/GetVerticesExecutor.cpp b/src/executor/query/GetVerticesExecutor.cpp
index e05eda2a8c9c7ddd8cb3ee5e347509b3e3fdbc74..e78c3f168d6b6833eb74f0aeb06fb47edc1f066e 100644
--- a/src/executor/query/GetVerticesExecutor.cpp
+++ b/src/executor/query/GetVerticesExecutor.cpp
@@ -61,9 +61,8 @@ folly::Future<Status> GetVerticesExecutor::getVertices() {
 }

 DataSet GetVerticesExecutor::buildRequestDataSet(const GetVertices* gv) {
-    nebula::DataSet vertices({kVid});
     if (gv == nullptr) {
-        return vertices;
+        return nebula::DataSet({kVid});
     }
     // Accept Table such as | $a | $b | $c |... as input which one column indicate src
     auto valueIter = ectx_->getResult(gv->inputVar()).iter();
diff --git a/tests/README.md b/tests/README.md
index f273dcc481b9bf0d6f25f60970de92f9470e7783..82a13f3d6c544115c5ed16a96a071b6db21bca23 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -61,6 +61,15 @@ We also provide a parameter named `address` to allow these tests to connect to t
 $ pytest --address="192.168.0.1:9669" -m 'not skip' .
 ```

+You can use the following command to rerun only the test cases that failed in the last run:
+
+```shell
+$ pytest --last-failed --gherkin-terminal-reporter --gherkin-terminal-reporter-expanded .
+```
+
+The `gherkin-terminal-reporter` options print the pytest report in a prettier, easier-to-read format.
+
+
 ### Stop nebula servers

 Following command will stop the nebula servers started in above steps:
@@ -83,7 +92,7 @@ $ make clean

 ## How to add test case

-You can find all nebula test cases in [these feature files](tck/features) and some openCypher cases in [other files](tck/openCypher/features). Some references about [TCK](https://github.com/opencypher/openCypher/tree/master/tck) may be what you need.
+You can find all nebula test cases in [tck/features](tck/features) and some openCypher cases in [tck/openCypher/features](tck/openCypher/features). Some references about [TCK](https://github.com/opencypher/openCypher/tree/master/tck) may be what you need.

 The test cases are organized in feature files and described in gherkin language. The structure of feature file is like following example:
diff --git a/tests/tck/features/mutate/InsertWithTimeType.feature b/tests/tck/features/mutate/InsertWithTimeType.feature
index 59bc42192086ec5e9322eb15b968c271f3fd8295..6dde75b6774ac48115f1b4c7b3dcc50f546d90fa 100644
--- a/tests/tck/features/mutate/InsertWithTimeType.feature
+++ b/tests/tck/features/mutate/InsertWithTimeType.feature
@@ -25,14 +25,19 @@ Feature: Insert with time-dependent types
       INSERT VERTEX TAG_TIMESTAMP(a) VALUES "TEST_VERTEX":("2000.0.0 10:0:0")
       """
     Then a ExecutionError should be raised at runtime:Storage Error: The data type does not meet the requirements. Use the correct type of data.
+    When try to execute query:
+      """
+      INSERT VERTEX TAG_TIME(a) VALUES "TEST_VERTEX":(NULL)
+      """
+    Then the execution should be successful
     When executing query:
       """
       INSERT VERTEX TAG_TIME(a) VALUES "TEST_VERTEX":("10:0:0")
       """
     Then a ExecutionError should be raised at runtime:Storage Error: The data type does not meet the requirements. Use the correct type of data.
-    When executing query:
+    When try to execute query:
       """
-      INSERT VERTEX TAG_TIME(a) VALUES "TEST_VERTEX":(NULL)
+      INSERT VERTEX TAG_DATE(a) VALUES "TEST_VERTEX":(NULL)
       """
     Then the execution should be successful
     When executing query:
@@ -40,9 +45,9 @@ Feature: Insert with time-dependent types
       INSERT VERTEX TAG_DATE(a) VALUES "TEST_VERTEX":("2000.0.0")
       """
     Then a ExecutionError should be raised at runtime:Storage Error: The data type does not meet the requirements. Use the correct type of data.
-    When executing query:
+    When try to execute query:
       """
-      INSERT VERTEX TAG_DATE(a) VALUES "TEST_VERTEX":(NULL)
+      INSERT VERTEX TAG_DATETIME(a) VALUES "TEST_VERTEX":(NULL)
       """
     Then the execution should be successful
     When executing query:
@@ -50,11 +55,6 @@ Feature: Insert with time-dependent types
       INSERT VERTEX TAG_DATETIME(a) VALUES "TEST_VERTEX":("2000.0.0")
       """
     Then a ExecutionError should be raised at runtime:Storage Error: The data type does not meet the requirements. Use the correct type of data.
-    When executing query:
-      """
-      INSERT VERTEX TAG_DATETIME(a) VALUES "TEST_VERTEX":(NULL)
-      """
-    Then the execution should be successful
     And drop the used space

   Scenario: Basic CRUD for timestamp type