Skip some of the non-critical tests in ./tools/run_flash_bench.sh

Summary:
Some of the tests aren't critical for collecting key RocksDB benchmarking data. This change introduces an environment variable, `SKIP_LOW_PRI_TESTS`, which enables skipping those test cases. By default all the tests are still run. To shorten the run, set the variable before invoking the script:

```
$ export SKIP_LOW_PRI_TESTS=1
$ ./tools/run_flash_bench.sh
```

Test Plan: Verified that when `SKIP_LOW_PRI_TESTS` is not set, `benchmark.sh` is called for all the scenarios, and when `SKIP_LOW_PRI_TESTS` is set to `1`, `benchmark.sh` is called only for the critical test cases.
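One way to spot-check this behavior without running the real benchmarks is to temporarily replace `tools/benchmark.sh` with a stub that only records which scenario it was asked to run, then compare the invocation lists from a normal run and a `SKIP_LOW_PRI_TESTS=1` run. The following is a hypothetical sketch, not part of this change: the `INVOCATION_LOG` variable and the stub script are invented for illustration, and it assumes `run_flash_bench.sh` keeps going even though the stub never produces the usual report files.

```
# Hypothetical verification stub: log the requested scenario instead of
# running the real benchmark. Back up the original script first.
cp tools/benchmark.sh tools/benchmark.sh.orig
cat > tools/benchmark.sh <<'EOF'
#!/usr/bin/env bash
# INVOCATION_LOG is an illustrative variable set by the caller below.
echo "$1" >> "${INVOCATION_LOG:-/tmp/invoked_benchmarks.txt}"
EOF
chmod +x tools/benchmark.sh

# Run the wrapper twice and record which scenarios were requested.
rm -f /tmp/all_tests.txt /tmp/skip_tests.txt
INVOCATION_LOG=/tmp/all_tests.txt ./tools/run_flash_bench.sh > /dev/null 2>&1
INVOCATION_LOG=/tmp/skip_tests.txt SKIP_LOW_PRI_TESTS=1 \
    ./tools/run_flash_bench.sh > /dev/null 2>&1

# The second list should be missing bulkload, the sync=1 variants, and the
# *whilemerging scenarios.
diff <(sort -u /tmp/all_tests.txt) <(sort -u /tmp/skip_tests.txt)

# Restore the real benchmark.sh.
mv tools/benchmark.sh.orig tools/benchmark.sh
```

After a real benchmark run the same difference shows up in `$output_dir/report2.txt`: with `SKIP_LOW_PRI_TESTS=1` the bulkload, sync=1, and while-merging sections are simply not written.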

Reviewers: MarkCallaghan

Reviewed By: MarkCallaghan

Subscribers: dhruba, leveldb

Differential Revision: https://reviews.facebook.net/D53739
Branch: main
Author: Gunnar Kudrjavets
Commit: a09ce4fcd3 (parent 284aa613a7)

Changed file: tools/run_flash_bench.sh

@@ -22,6 +22,7 @@
# test and the tests are listed below.
#
# The environment variables are also optional. The variables are:
#
# NKEYS - number of key/value pairs to load
# BG_MBWRITEPERSEC - write rate limit in MB/second for tests in which
# there is one thread doing writes and stats are
@@ -54,6 +55,10 @@
# SAVE_SETUP - saves a copy of the database at the end of step 1 to
# $DATA_DIR.bak. When LOG_DIR != DATA_DIR then it is copied
# to $LOG_DIR.bak.
# SKIP_LOW_PRI_TESTS - skip some of the tests which aren't crucial for getting
# actionable benchmarking data (look for keywords "bulkload",
# "sync=1", and "while merging").
#
# Size constants
K=1024
@@ -89,6 +94,14 @@ wal_dir=${LOG_DIR:-"/tmp/rocksdb/"}
do_setup=${DO_SETUP:-1}
save_setup=${SAVE_SETUP:-0}
# By default we'll run all the tests. Set this to skip a set of tests which
# aren't critical for getting key metrics.
skip_low_pri_tests=${SKIP_LOW_PRI_TESTS:-0}
if [[ $skip_low_pri_tests == 1 ]]; then
echo "Skipping some non-critical tests because SKIP_LOW_PRI_TESTS is set."
fi
output_dir="/tmp/output"
ARGS="\
@@ -116,8 +129,10 @@ echo -e "ops/sec\tmb/sec\tSize-GB\tL0_GB\tSum_GB\tW-Amp\tW-MB/s\tusec/op\tp50\tp
if [[ $do_setup != 0 ]]; then
echo Doing setup
# Test 1: bulk load
env $ARGS ./tools/benchmark.sh bulkload
if [[ $skip_low_pri_tests != 1 ]]; then
# Test 1: bulk load
env $ARGS ./tools/benchmark.sh bulkload
fi
# Test 2a: sequential fill with large values to get peak ingest
# adjust NUM_KEYS given the use of larger values
@@ -188,16 +203,20 @@ for num_thr in "${nthreads[@]}" ; do
env $ARGS DURATION=$duration NUM_THREADS=$num_thr MB_WRITE_PER_SEC=$fg_mbwps \
DB_BENCH_NO_SYNC=1 ./tools/benchmark.sh overwrite
# Test 8: overwrite with sync=1
env $ARGS DURATION=$duration NUM_THREADS=$num_thr MB_WRITE_PER_SEC=$fg_mbwps \
./tools/benchmark.sh overwrite
if [[ $skip_low_pri_tests != 1 ]]; then
# Test 8: overwrite with sync=1
env $ARGS DURATION=$duration NUM_THREADS=$num_thr MB_WRITE_PER_SEC=$fg_mbwps \
./tools/benchmark.sh overwrite
fi
# Test 9: random update with sync=0
env $ARGS DURATION=$duration NUM_THREADS=$num_thr DB_BENCH_NO_SYNC=1 \
./tools/benchmark.sh updaterandom
# Test 10: random update with sync=1
env $ARGS DURATION=$duration NUM_THREADS=$num_thr ./tools/benchmark.sh updaterandom
if [[ $skip_low_pri_tests != 1 ]]; then
# Test 10: random update with sync=1
env $ARGS DURATION=$duration NUM_THREADS=$num_thr ./tools/benchmark.sh updaterandom
fi
# Test 11: random read while writing
env $ARGS DURATION=$duration NUM_THREADS=$num_thr MB_WRITE_PER_SEC=$bg_mbwps \
@@ -219,73 +238,105 @@ for num_thr in "${nthreads[@]}" ; do
env $ARGS DURATION=$duration NUM_THREADS=$num_thr MB_WRITE_PER_SEC=$fg_mbwps \
DB_BENCH_NO_SYNC=1 ./tools/benchmark.sh mergerandom
# Test 15: random merge with sync=1
env $ARGS DURATION=$duration NUM_THREADS=$num_thr MB_WRITE_PER_SEC=$fg_mbwps \
./tools/benchmark.sh mergerandom
if [[ $skip_low_pri_tests != 1 ]]; then
# Test 15: random merge with sync=1
env $ARGS DURATION=$duration NUM_THREADS=$num_thr MB_WRITE_PER_SEC=$fg_mbwps \
./tools/benchmark.sh mergerandom
# Test 16: random read while merging
env $ARGS DURATION=$duration NUM_THREADS=$num_thr MB_WRITE_PER_SEC=$bg_mbwps \
DB_BENCH_NO_SYNC=1 ./tools/benchmark.sh readwhilemerging
# Test 16: random read while merging
env $ARGS DURATION=$duration NUM_THREADS=$num_thr MB_WRITE_PER_SEC=$bg_mbwps \
DB_BENCH_NO_SYNC=1 ./tools/benchmark.sh readwhilemerging
# Test 17: range scan while merging
env $ARGS DURATION=$duration NUM_THREADS=$num_thr MB_WRITE_PER_SEC=$bg_mbwps \
DB_BENCH_NO_SYNC=1 NUM_NEXTS_PER_SEEK=$nps ./tools/benchmark.sh fwdrangewhilemerging
# Test 17: range scan while merging
env $ARGS DURATION=$duration NUM_THREADS=$num_thr MB_WRITE_PER_SEC=$bg_mbwps \
DB_BENCH_NO_SYNC=1 NUM_NEXTS_PER_SEEK=$nps ./tools/benchmark.sh fwdrangewhilemerging
# Test 18: reverse range scan while merging
env $ARGS DURATION=$duration NUM_THREADS=$num_thr MB_WRITE_PER_SEC=$bg_mbwps \
DB_BENCH_NO_SYNC=1 NUM_NEXTS_PER_SEEK=$nps ./tools/benchmark.sh revrangewhilemerging
# Test 18: reverse range scan while merging
env $ARGS DURATION=$duration NUM_THREADS=$num_thr MB_WRITE_PER_SEC=$bg_mbwps \
DB_BENCH_NO_SYNC=1 NUM_NEXTS_PER_SEEK=$nps ./tools/benchmark.sh revrangewhilemerging
fi
done
echo bulkload > $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep bulkload $output_dir/report.txt >> $output_dir/report2.txt
if [[ $skip_low_pri_tests != 1 ]]; then
echo bulkload > $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep bulkload $output_dir/report.txt >> $output_dir/report2.txt
fi
echo fillseq >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep fillseq $output_dir/report.txt >> $output_dir/report2.txt
echo overwrite sync=0 >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep overwrite $output_dir/report.txt | grep \.s0 >> $output_dir/report2.txt
echo overwrite sync=1 >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep overwrite $output_dir/report.txt | grep \.s1 >> $output_dir/report2.txt
if [[ $skip_low_pri_tests != 1 ]]; then
echo overwrite sync=1 >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep overwrite $output_dir/report.txt | grep \.s1 >> $output_dir/report2.txt
fi
echo updaterandom sync=0 >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep updaterandom $output_dir/report.txt | grep \.s0 >> $output_dir/report2.txt
echo updaterandom sync=1 >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep updaterandom $output_dir/report.txt | grep \.s1 >> $output_dir/report2.txt
if [[ $skip_low_pri_tests != 1 ]]; then
echo updaterandom sync=1 >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep updaterandom $output_dir/report.txt | grep \.s1 >> $output_dir/report2.txt
fi
echo mergerandom sync=0 >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep mergerandom $output_dir/report.txt | grep \.s0 >> $output_dir/report2.txt
echo mergerandom sync=1 >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep mergerandom $output_dir/report.txt | grep \.s1 >> $output_dir/report2.txt
if [[ $skip_low_pri_tests != 1 ]]; then
echo mergerandom sync=1 >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep mergerandom $output_dir/report.txt | grep \.s1 >> $output_dir/report2.txt
fi
echo readrandom >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep readrandom $output_dir/report.txt >> $output_dir/report2.txt
echo fwdrange >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep fwdrange\.t $output_dir/report.txt >> $output_dir/report2.txt
echo revrange >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep revrange\.t $output_dir/report.txt >> $output_dir/report2.txt
echo readwhile >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep readwhilewriting $output_dir/report.txt >> $output_dir/report2.txt
echo readwhile >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep readwhilemerging $output_dir/report.txt >> $output_dir/report2.txt
if [[ $skip_low_pri_tests != 1 ]]; then
echo readwhile >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep readwhilemerging $output_dir/report.txt >> $output_dir/report2.txt
fi
echo fwdreadwhilewriting >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep fwdrangewhilewriting $output_dir/report.txt >> $output_dir/report2.txt
echo fwdreadwhilemerging >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep fwdrangewhilemerg $output_dir/report.txt >> $output_dir/report2.txt
if [[ $skip_low_pri_tests != 1 ]]; then
echo fwdreadwhilemerging >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep fwdrangewhilemerg $output_dir/report.txt >> $output_dir/report2.txt
fi
echo revreadwhilewriting >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep revrangewhilewriting $output_dir/report.txt >> $output_dir/report2.txt
echo revreadwhilemerging >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep revrangewhilemerg $output_dir/report.txt >> $output_dir/report2.txt
if [[ $skip_low_pri_tests != 1 ]]; then
echo revreadwhilemerging >> $output_dir/report2.txt
head -1 $output_dir/report.txt >> $output_dir/report2.txt
grep revrangewhilemerg $output_dir/report.txt >> $output_dir/report2.txt
fi
cat $output_dir/report2.txt
