Add a doc page for data mgmt (#13398) #1
Workflow file for this run

name: Benchmark
on:
  push:
    branches: [ main, extensions ]
  workflow_dispatch:
permissions:
  # deployments permission to deploy GitHub pages website
  deployments: write
  # contents permission to update benchmark contents in gh-pages branch
  contents: write
env:
  CARGO_TERM_COLOR: always
  # Disable incremental compilation.
  #
  # Incremental compilation is useful as part of an edit-build-test-edit cycle,
  # as it lets the compiler avoid recompiling code that hasn't changed. However,
  # on CI, we're not making small edits; we're almost always building the entire
  # project from scratch. Thus, incremental compilation on CI actually
  # introduces *additional* overhead to support making future builds
  # faster...but no future builds will ever occur in any given CI environment.
  #
  # See https://matklad.github.io/2021/09/04/fast-rust-builds.html#ci-workflow
  # for details.
  CARGO_INCREMENTAL: 0
  # Allow more retries for network requests in cargo (downloading crates) and
  # rustup (installing toolchains). This should help to reduce flaky CI failures
  # from transient network timeouts or other issues.
  CARGO_NET_RETRY: 10
  RUSTUP_MAX_RETRIES: 10
  # Don't emit giant backtraces in the CI logs.
  RUST_BACKTRACE: short
jobs:
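  # Detect whether the push touched any Rust code; the benchmark jobs below are
  # skipped when it did not.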
  diff:
    runs-on: [ubuntu-latest]
    outputs:
      isRust: ${{ steps.diff.outputs.isRust }}
    steps:
      - uses: actions/checkout@7dd9e2a3dc350cf687eb1b2a4fadfee8c8e49675 # pin@v3
      - name: Detect Changes
        uses: './.github/actions/diffs'
        id: diff
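  # Run the stress benchmarks and the Narwhal benchmark, then post the results as a
  # commit comment.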
  bench:
    needs: diff
    if: github.event.pull_request.draft == false && needs.diff.outputs.isRust == 'true'
    runs-on: [ubuntu-ghcloud]
    steps:
      - uses: actions/checkout@7dd9e2a3dc350cf687eb1b2a4fadfee8c8e49675 # pin@v3
      # Caching is turned off on self-hosted jobs. The commented-out step below is kept
      # for reference: it would enable caching of the 'librocksdb-sys' crate by
      # additionally caching the 'librocksdb-sys' src directory, which is managed by cargo.
      # - uses: bmwill/rust-cache@v1 # Fork of 'Swatinem/rust-cache' which allows caching additional paths
      #   with:
      #     path: ~/.cargo/registry/src/**/librocksdb-sys-*
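      # huniq is used below to drop duplicate lines from the benchmark logs.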
      - name: Install huniq
        uses: actions-rs/install@9da1d2adcfe5e7c16992e8242ca33a56b6d9b101 # [email protected]
        with:
          crate: huniq
      - name: Install python dependencies
        run: |
          pip install pyopenssl --upgrade
          if [ -f narwhal/benchmark/requirements.txt ]; then pip install -r narwhal/benchmark/requirements.txt; fi
      - name: Prepare artifact directory
        run: |
          mkdir -p artifacts
      - name: Compile benchmark
        uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # pin@v1
        with:
          command: build
          args: --release
      - name: Run benchmarks
        run: |
          set -o pipefail
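          # With pipefail set above, a failing 'cargo run' fails the step even though its
          # output is piped through 'huniq | tee'. Each run appends its deduplicated log
          # to a file under artifacts/ that the next step parses.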
          cargo run --release --package sui-benchmark --bin stress -- --log-path /tmp/stress.log --num-client-threads 16 --num-server-threads 32 --num-transfer-accounts 2 bench --committee-size 4 --target-qps 1000 --num-workers 16 --transfer-object 100 --run-duration 60s --stress-stat-collection 2>&1 | huniq | tee -a artifacts/owned_4_500.txt
          cargo run --release --package sui-benchmark --bin stress -- --log-path /tmp/stress.log --num-client-threads 16 --num-server-threads 32 --num-transfer-accounts 2 bench --committee-size 4 --target-qps 1000 --num-workers 16 --shared-counter 100 --run-duration 60s --stress-stat-collection 2>&1 | huniq | tee -a artifacts/shared_4_500.txt
          cargo run --release --package sui-benchmark --bin stress -- --log-path /tmp/stress.log --num-client-threads 16 --num-server-threads 32 --num-transfer-accounts 2 bench --committee-size 20 --target-qps 200 --num-workers 16 --transfer-object 100 --run-duration 60s --stress-stat-collection 2>&1 | huniq | tee -a artifacts/owned_20_150.txt
          cargo run --release --package sui-benchmark --bin stress -- --log-path /tmp/stress.log --num-client-threads 16 --num-server-threads 32 --num-transfer-accounts 2 bench --committee-size 20 --target-qps 200 --num-workers 16 --shared-counter 100 --run-duration 60s --stress-stat-collection 2>&1 | huniq | tee -a artifacts/shared_20_50.txt
          pushd narwhal/benchmark && fab local | tee -a ../../artifacts/narwhal.txt && popd
      - name: Retrieve benchmark results
        id: get-comment-body
        run: |
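          # Multiline step outputs are written to $GITHUB_OUTPUT with the heredoc-style
          # "name<<DELIMITER" syntax; a random delimiter avoids collisions with the report text.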
          delimiter="$(openssl rand -hex 8)"
          owned_4_500="$(grep -e 'Benchmark Report:' -A 1000 artifacts/owned_4_500.txt)"
          echo "owned_4_500<<$delimiter" >> $GITHUB_OUTPUT
          echo "$owned_4_500" >> $GITHUB_OUTPUT
          echo "$delimiter" >> $GITHUB_OUTPUT
          shared_4_500="$(grep -e 'Benchmark Report:' -A 1000 artifacts/shared_4_500.txt)"
          echo "shared_4_500<<$delimiter" >> $GITHUB_OUTPUT
          echo "$shared_4_500" >> $GITHUB_OUTPUT
          echo "$delimiter" >> $GITHUB_OUTPUT
          owned_20_150="$(grep -e 'Benchmark Report:' -A 1000 artifacts/owned_20_150.txt)"
          echo "owned_20_150<<$delimiter" >> $GITHUB_OUTPUT
          echo "$owned_20_150" >> $GITHUB_OUTPUT
          echo "$delimiter" >> $GITHUB_OUTPUT
          shared_20_50="$(grep -e 'Benchmark Report:' -A 1000 artifacts/shared_20_50.txt)"
          echo "shared_20_50<<$delimiter" >> $GITHUB_OUTPUT
          echo "$shared_20_50" >> $GITHUB_OUTPUT
          echo "$delimiter" >> $GITHUB_OUTPUT
          narwhal="$(grep -e 'SUMMARY:' -A 1000 artifacts/narwhal.txt)"
          echo "narwhal<<$delimiter" >> $GITHUB_OUTPUT
          echo "$narwhal" >> $GITHUB_OUTPUT
          echo "$delimiter" >> $GITHUB_OUTPUT
      - name: Post commit comment
        uses: peter-evans/commit-comment@76d2ae14b83cd171cd38507097b9616bb9ca7cb6 # [email protected]
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          body: |
            **4 Validators 1000/s Owned Transactions Benchmark Results**
            ```
            ${{ steps.get-comment-body.outputs.owned_4_500 }}
            ```
            **4 Validators 1000/s Shared Transactions Benchmark Results**
            ```
            ${{ steps.get-comment-body.outputs.shared_4_500 }}
            ```
            **20 Validators 200/s Owned Transactions Benchmark Results**
            ```
            ${{ steps.get-comment-body.outputs.owned_20_150 }}
            ```
            **20 Validators 200/s Shared Transactions Benchmark Results**
            ```
            ${{ steps.get-comment-body.outputs.shared_20_50 }}
            ```
            **Narwhal Benchmark Results**
            ```
            ${{ steps.get-comment-body.outputs.narwhal }}
            ```
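  # Benchmark the indexer against a Postgres service container and track the results
  # over time with github-action-benchmark.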
  indexer-bench:
    name: indexer
    needs: diff
    if: needs.diff.outputs.isRust == 'true'
    runs-on: ubuntu-ghcloud
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_PASSWORD: postgrespw
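        # Wait until Postgres is ready to accept connections before the job's steps start.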
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
    steps:
      - uses: actions/checkout@v3
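      # '--output-format bencher' emits results in the bencher text format that the
      # 'cargo' tool of github-action-benchmark (below) parses.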
      - name: Run indexer benchmark
        run: |
          cargo bench --bench indexer_benchmark -- --output-format bencher | tee output.txt
        env:
          POSTGRES_HOST: localhost
          POSTGRES_PORT: 5432
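      # Publish the parsed results to the gh-pages branch under bench/indexer and alert
      # (via commit comment and job failure) when a run regresses past the threshold.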
      - name: Store benchmark result
        uses: benchmark-action/github-action-benchmark@v1
        with:
          tool: "cargo"
          output-file-path: output.txt
          benchmark-data-dir-path: "bench/indexer"
          github-token: ${{ secrets.GITHUB_TOKEN }}
          auto-push: true
          # Show alert with commit comment on detecting possible performance regression
          alert-threshold: '200%'
          comment-on-alert: true
          fail-on-alert: true