Add CI for standalone compile and UT. #3637

Merged: 12 commits, merged on Jan 26, 2022
Changes from 10 commits
89 changes: 89 additions & 0 deletions .github/workflows/pull_request.yml
@@ -199,3 +199,92 @@ jobs:
with:
name: ${{ matrix.os }}-${{ matrix.compiler }}-nebula-test-logs
path: ./build/server_*/logs/

standalone:
name: standalone-build
needs: lint
runs-on: [self-hosted, nebula]
strategy:
fail-fast: false
matrix:
os:
- centos7
compiler:
- gcc-9.3
env:
CCACHE_DIR: /tmp/ccache/nebula/${{ matrix.os }}-${{ matrix.compiler }}
CCACHE_MAXSIZE: 8G
container:
image: vesoft/nebula-dev:${{ matrix.os }}
volumes:
- /tmp/ccache/nebula/${{ matrix.os }}-${{ matrix.compiler }}:/tmp/ccache/nebula/${{ matrix.os }}-${{ matrix.compiler }}
options: --cap-add=SYS_PTRACE
steps:
- uses: webiny/[email protected]
with:
run: sh -c "find . -mindepth 1 -delete"
- uses: actions/checkout@v2
- name: Prepare environment
id: prepare
run: |
[ -d build/ ] && rm -rf build/* || mkdir -p build
make init -C tests
- name: CMake
id: cmake
run: |
case ${{ matrix.compiler }} in
gcc-*)
case ${{ matrix.os }} in
centos7)
# build with Release type
cmake \
-DCMAKE_CXX_COMPILER=$TOOLSET_GCC_DIR/bin/g++ \
-DCMAKE_C_COMPILER=$TOOLSET_GCC_DIR/bin/gcc \
-DCMAKE_BUILD_TYPE=Release \
-DENABLE_TESTING=on \
-DENABLE_STANDALONE_VERSION=on \
-GNinja \
-B build
echo "::set-output name=j::10"
;;
esac
;;
esac
- name: Make
run: |
ccache -z
ninja -j $(nproc)
ccache -s
working-directory: build/
- name: CTest
env:
ASAN_OPTIONS: fast_unwind_on_malloc=1
run: ctest -j $(($(nproc)/2+1)) --timeout 400 --output-on-failure
working-directory: build/
timeout-minutes: 20
- name: Setup Cluster
run: |
make standalone-up
working-directory: tests/
timeout-minutes: 60
- name: TCK
run: |
make RM_DIR=false DEBUG=false J=${{ steps.cmake.outputs.j }} standalone-tck
working-directory: tests/
timeout-minutes: 60
- name: LDBC
run: |
make RM_DIR=false DEBUG=false J=${{ steps.cmake.outputs.j }} ldbc
working-directory: tests/
timeout-minutes: 60
- name: Down cluster
run: |
make RM_DIR=false down
working-directory: tests/
timeout-minutes: 2
- name: Upload logs
uses: actions/upload-artifact@v2
if: ${{ failure() }}
with:
name: ${{ matrix.os }}-${{ matrix.compiler }}-nebula-test-logs
path: ./build/server_*/logs/
14 changes: 8 additions & 6 deletions src/clients/storage/StorageClientBase-inl.h
@@ -240,16 +240,18 @@ void StorageClientBase<ClientType, ClientManagerType>::getResponseImpl(
DCHECK(!!ioThreadPool_);
evb = ioThreadPool_->getEventBase();
}
auto reqPtr = std::make_shared<std::pair<HostAddr, Request>>(std::move(request.first),
std::move(request.second));
folly::via(
evb,
[evb, request = std::move(request), remoteFunc = std::move(remoteFunc), pro, this]() mutable {
auto host = request.first;
[evb, request = std::move(reqPtr), remoteFunc = std::move(remoteFunc), pro, this]() mutable {
auto host = request->first;
auto client = clientsMan_->client(host, evb, false, FLAGS_storage_client_timeout_ms);
auto spaceId = request.second.get_space_id();
auto partsId = getReqPartsId(request.second);
remoteFunc(client.get(), request.second)
auto spaceId = request->second.get_space_id();
auto partsId = getReqPartsId(request->second);
remoteFunc(client.get(), request->second)
.via(evb)
.thenValue([spaceId, pro, this](Response&& resp) mutable {
.thenValue([spaceId, pro, request, this](Response&& resp) mutable {
auto& result = resp.get_result();
for (auto& code : result.get_failed_parts()) {
VLOG(3) << "Failure! Failed part " << code.get_part_id() << ", failed code "
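The hunk above appears to move the (host, request) pair into a std::shared_ptr so that it can be captured by both the initial folly::via lambda and the later thenValue continuation, keeping the request alive for the whole asynchronous chain instead of being destroyed after the first lambda runs. Below is a minimal, standard-library sketch of that lifetime pattern; the Request type, payload strings, and std::async stages are hypothetical stand-ins for the folly future chain in the real code.

```cpp
// Minimal, hypothetical sketch of the shared-ownership pattern above:
// the request is wrapped in a std::shared_ptr, and every asynchronous
// continuation that copies the pointer keeps the payload alive until
// the last callback has finished.
#include <future>
#include <iostream>
#include <memory>
#include <string>
#include <utility>

struct Request {
  std::string payload;
};

int main() {
  auto req = std::make_shared<std::pair<std::string, Request>>(
      std::string("host-1"), Request{"get neighbors"});

  // First hop (analogous to the folly::via lambda): the lambda copies the
  // shared_ptr, so the request outlives the caller's stack frame.
  auto sent = std::async(std::launch::async, [req] {
    return req->first + ": sent " + req->second.payload;
  });

  // Second hop (analogous to .thenValue(...) now also capturing `request`):
  // the continuation holds another copy, extending the lifetime once more.
  auto done = std::async(std::launch::async, [req, s = sent.share()] {
    return s.get() + " | still alive: " + req->second.payload;
  });

  std::cout << done.get() << std::endl;
  return 0;
}
```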
1 change: 1 addition & 0 deletions src/daemons/CMakeLists.txt
@@ -242,6 +242,7 @@ nebula_add_executable(
$<TARGET_OBJECTS:meta_http_handler>
$<TARGET_OBJECTS:meta_version_man_obj>
$<TARGET_OBJECTS:meta_data_upgrade_obj>
$<TARGET_OBJECTS:meta_v2_thrift_obj>
${storage_meta_deps}
${common_deps}
LIBRARIES
7 changes: 7 additions & 0 deletions src/daemons/StandAloneDaemon.cpp
@@ -163,6 +163,13 @@ int main(int argc, char *argv[]) {
return EXIT_FAILURE;
}

// load the time zone data
status = nebula::time::Timezone::init();
if (!status.ok()) {
LOG(ERROR) << status;
return EXIT_FAILURE;
}

// Initialize the global timezone, it's only used for datetime type compute
// won't affect the process timezone.
status = nebula::time::Timezone::initializeGlobalTimezone();
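The added block follows the daemon's fail-fast startup convention: each initialization step returns a Status, and the process exits with EXIT_FAILURE if the step did not succeed, so no datetime computation ever runs without the timezone data loaded. A simplified, self-contained sketch of that pattern; the Status type and loadTimezoneData() are hypothetical stand-ins, not the nebula API.

```cpp
// Simplified sketch of the fail-fast startup check above. The Status type
// and loadTimezoneData() are hypothetical stand-ins, not the nebula API.
#include <cstdlib>
#include <iostream>
#include <string>
#include <utility>

struct Status {
  bool ok_ = true;
  std::string msg_;
  bool ok() const { return ok_; }
  static Status OK() { return Status{}; }
  static Status Error(std::string m) { return Status{false, std::move(m)}; }
};

Status loadTimezoneData() {
  // Would read the tz database here; report success for the sketch.
  return Status::OK();
}

int main() {
  Status status = loadTimezoneData();
  if (!status.ok()) {
    std::cerr << "timezone init failed: " << status.msg_ << std::endl;
    return EXIT_FAILURE;  // abort startup before anything uses datetimes
  }
  std::cout << "timezone data loaded" << std::endl;
  return EXIT_SUCCESS;
}
```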
1 change: 0 additions & 1 deletion src/graph/executor/test/StorageServerStub.cpp
@@ -23,7 +23,6 @@ std::shared_ptr<GraphStorageLocalServer> instance_ = nullptr;

void GraphStorageLocalServer::setThreadManager(
std::shared_ptr<apache::thrift::concurrency::ThreadManager> threadManager) {
// lock?
threadManager_ = threadManager;
}

1 change: 0 additions & 1 deletion src/storage/GraphStorageLocalServer.cpp
@@ -26,7 -26,6 @@ std::shared_ptr<GraphStorageLocalServer> instance_ = nullptr;

void GraphStorageLocalServer::setThreadManager(
std::shared_ptr<apache::thrift::concurrency::ThreadManager> threadManager) {
// lock?
threadManager_ = threadManager;
}

2 changes: 2 additions & 0 deletions src/storage/stats/StorageStats.cpp
@@ -21,8 +21,10 @@ void initStorageStats() {
kNumTagsDeleted = stats::StatsManager::registerStats("num_tags_deleted", "rate, sum");
kNumVerticesDeleted = stats::StatsManager::registerStats("num_vertices_deleted", "rate, sum");

#ifndef BUILD_STANDALONE
initMetaClientStats();
initKVStats();
#endif
}

} // namespace nebula
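The guard above suggests the standalone binary is compiled with a BUILD_STANDALONE macro (presumably driven by the -DENABLE_STANDALONE_VERSION=on CMake option used in the new workflow), so the meta-client and KV counters are simply never registered in that build. A small, hypothetical illustration of the same compile-time switch follows; the counter names other than the two tag/vertex stats are made up for the sketch.

```cpp
// Hypothetical illustration of the compile-time switch: compiling with
// -DBUILD_STANDALONE drops the meta/KV counters from registration entirely.
// The "meta_client_rpc" / "kvstore_ops" names are illustrative only.
#include <iostream>
#include <string>
#include <vector>

static std::vector<std::string> g_stats;

static void registerStats(const std::string& name) { g_stats.push_back(name); }

void initStorageStats() {
  registerStats("num_tags_deleted");
  registerStats("num_vertices_deleted");
#ifndef BUILD_STANDALONE
  // Only the distributed build talks to separate meta/KV services.
  registerStats("meta_client_rpc");
  registerStats("kvstore_ops");
#endif
}

int main() {
  initStorageStats();
  for (const auto& s : g_stats) {
    std::cout << s << "\n";
  }
  return 0;
}
```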
19 changes: 19 additions & 0 deletions tests/Makefile
@@ -26,7 +26,10 @@ PASSWORD_LOCK_TIME_IN_SECS ?= 0
gherkin_fmt = ~/.local/bin/reformat-gherkin
run_test = PYTHONPATH=$$PYTHONPATH:$(CURR_DIR)/.. $(CURR_DIR)/nebula-test-run.py
test_without_skip = python3 -m pytest -m "not skip"
test_without_skip_sa = python3 -m pytest -m "not skip and not distonly"
test_j = $(test_without_skip) -n$(J)
test_j_sa = $(test_without_skip_sa) -n$(J)


install-deps:
pip3 install --user -U setuptools wheel -i $(PYPI_MIRROR)
@@ -73,6 +76,18 @@ up: clean
--ca_signed=$(CA_SIGNED) \
--containerized=$(CONTAINERIZED)

standalone-up: clean
@mkdir -p $(CURR_DIR)/.pytest
$(run_test) --cmd=start_standalone \
--build_dir=$(BUILD_DIR) \
--debug=$(DEBUG) \
--multi_graphd=false \
--enable_ssl=$(ENABLE_SSL) \
--enable_graph_ssl=$(ENABLE_GRAPH_SSL) \
--enable_meta_ssl=$(ENABLE_META_SSL) \
--ca_signed=$(CA_SIGNED) \
--containerized=$(CONTAINERIZED)

down:
$(run_test) --cmd=stop --rm_dir=$(RM_DIR)

@@ -92,13 +107,17 @@ slow-query: currdir
$(test_j) tck/steps/test_kill_slow_query_via_same_service.py && \
$(test_j) tck/steps/test_kill_slow_query_via_different_service.py

standalone-tck: jobs
$(test_j_sa) tck/steps/test_tck.py

tck: jobs slow-query
$(test_j) tck/steps/test_tck.py

ldbc: currdir
$(test_j) tck/steps/test_ldbc.py

test-all: test tck ldbc
test-standalone-all: standalone-tck ldbc

fail: currdir
python3 -m pytest \