if [ ! -z "$DB_BENCH_NO_SYNC" ]; then
  syncval="0";
fi

# Tunables, overridable from the environment.
# NOTE(review): K, G (and M) are size constants presumably defined earlier
# in the file (1024, 1024^3, ...) — confirm against the full script.
num_threads=${NUM_THREADS:-16}
# Only for *whilewriting, *whilemerging
writes_per_second=${WRITES_PER_SECOND:-$((10 * K))}
# Only for tests that do range scans
num_nexts_per_seek=${NUM_NEXTS_PER_SEEK:-10}
cache_size=${CACHE_SIZE:-$((1 * G))}
duration=${DURATION:-0}

num_keys=${NUM_KEYS:-$((1 * G))}
key_size=20
value_size=${VALUE_SIZE:-400}
const_params = "
--db= $DB_DIR \
--wal_dir= $WAL_DIR \
--disable_data_sync= 0 \
\
--num= $num_keys \
--num_levels= 6 \
--key_size= $key_size \
--value_size= $value_size \
@ -57,10 +60,10 @@ const_params="
--cache_size= $cache_size \
--cache_numshardbits= 6 \
--compression_type= zlib \
--min_level_to_compress= 2 \
--min_level_to_compress= 3 \
--compression_ratio= 0.5 \
\
--hard_rate_limit= 2 \
--hard_rate_limit= 3 \
--rate_limit_delay_max_milliseconds= 1000000 \
--write_buffer_size= $(( 128 * M)) \
--max_write_buffer_number= 3 \
@ -69,7 +72,8 @@ const_params="
\
--verify_checksum= 1 \
--delete_obsolete_files_period_micros= $(( 60 * M)) \
--max_grandparent_overlap_factor= 10 \
--max_grandparent_overlap_factor= 8 \
--max_bytes_for_level_multiplier= 8 \
\
--statistics= 1 \
--stats_per_interval= 1 \
@ -82,158 +86,173 @@ const_params="
# Level-0 compaction/throttle triggers shared by the read and write configs.
l0_config="
    --level0_file_num_compaction_trigger=4 \
    --level0_slowdown_writes_trigger=12 \
    --level0_stop_writes_trigger=20"

# Optionally bound each test by wall-clock time instead of key count.
if [ "${duration:-0}" -gt 0 ]; then
  const_params="$const_params --duration=$duration"
fi

# Read-heavy vs write-heavy background-thread mixes, plus a bulk-load
# config that effectively disables L0 throttling during the load.
params_r="$const_params $l0_config --max_background_compactions=4 --max_background_flushes=1"
params_w="$const_params $l0_config --max_background_compactions=16 --max_background_flushes=16"
params_bulkload="$const_params --max_background_compactions=16 --max_background_flushes=16 \
    --level0_file_num_compaction_trigger=$((10 * M)) \
    --level0_slowdown_writes_trigger=$((10 * M)) \
    --level0_stop_writes_trigger=$((10 * M))"
# Parse one db_bench output file and append a tab-separated summary row
# (throughput, write-amp, latency percentiles, stall info) to report.txt.
#   $1 - db_bench output file, $2 - label for the row, $3 - benchmark name
#        as it appears at the start of db_bench's throughput line.
function summarize_result {
  test_out=$1
  test_name=$2
  bench_name=$3

  uptime=$( grep ^Uptime\(secs $test_out | tail -1 | awk '{ printf "%.0f", $2 }' )
  stall_time=$( grep "^Cumulative stall" $test_out | tail -1 | awk '{ print $3 }' )
  stall_pct=$( grep "^Cumulative stall" $test_out | tail -1 | awk '{ print $5 }' )
  ops_sec=$( grep ^${bench_name} $test_out | awk '{ print $5 }' )
  mb_sec=$( grep ^${bench_name} $test_out | awk '{ print $7 }' )
  # Compaction stats: GB written into L0 vs total GB written -> write-amp.
  lo_wgb=$( grep "^  L0" $test_out | tail -1 | awk '{ print $8 }' )
  sum_wgb=$( grep "^ Sum" $test_out | tail -1 | awk '{ print $8 }' )
  wamp=$( echo "scale=1; $sum_wgb / $lo_wgb" | bc )
  wmb_ps=$( echo "scale=1; ( $sum_wgb * 1024.0 ) / $uptime" | bc )
  usecs_op=$( grep ^${bench_name} $test_out | awk '{ printf "%.1f", $3 }' )
  p50=$( grep "^Percentiles:" $test_out | awk '{ printf "%.1f", $3 }' )
  p75=$( grep "^Percentiles:" $test_out | awk '{ printf "%.1f", $5 }' )
  p99=$( grep "^Percentiles:" $test_out | awk '{ printf "%.1f", $7 }' )
  echo -e "$ops_sec\t$mb_sec\t$lo_wgb\t$sum_wgb\t$wamp\t$wmb_ps\t$usecs_op\t$p50\t$p75\t$p99\t$uptime\t$stall_time\t$stall_pct\t$test_name" \
    >> $output_dir/report.txt
}
# Bulk load: fill with random keys while auto-compaction is off, then run
# one manual compaction so later tests start from a compacted database.
function run_bulkload {
  echo "Bulk loading $num_keys random keys"
  cmd="./db_bench --benchmarks=fillrandom \
       --use_existing_db=0 \
       --num=$num_keys \
       --disable_auto_compactions=1 \
       --sync=0 \
       --disable_data_sync=0 \
       $params_bulkload \
       --threads=1 \
       2>&1 | tee -a $output_dir/benchmark_bulkload_fillrandom.log"
  echo $cmd | tee $output_dir/benchmark_bulkload_fillrandom.log
  eval $cmd
  summarize_result $output_dir/benchmark_bulkload_fillrandom.log bulkload fillrandom
  echo "Compacting..."
  cmd="./db_bench --benchmarks=compact \
       --use_existing_db=1 \
       --num=$num_keys \
       --disable_auto_compactions=1 \
       --sync=0 \
       --disable_data_sync=0 \
       $params_w \
       --threads=1 \
       2>&1 | tee -a $output_dir/benchmark_bulkload_compact.log"
  echo $cmd | tee $output_dir/benchmark_bulkload_compact.log
  eval $cmd
}
# Load the database with $num_keys keys in sequential order, single-threaded.
function run_fillseq {
  echo "Loading $num_keys keys sequentially"
  cmd="./db_bench --benchmarks=fillseq \
       --use_existing_db=0 \
       --num=$num_keys \
       --sync=0 \
       --disable_data_sync=0 \
       $params_w \
       --threads=1 \
       2>&1 | tee -a $output_dir/benchmark_fillseq.log"
  echo $cmd | tee $output_dir/benchmark_fillseq.log
  eval $cmd
  summarize_result $output_dir/benchmark_fillseq.log fillseq fillseq
}
# Run a random-write benchmark ($1 is one of: overwrite, updaterandom,
# mergerandom) against an existing database with $num_threads writers.
function run_change {
  operation=$1
  echo "Do $num_keys random $operation"
  out_name="benchmark_${operation}.t${num_threads}.s${syncval}.log"
  cmd="./db_bench --benchmarks=$operation \
       --use_existing_db=1 \
       --num=$num_keys \
       --sync=$syncval \
       --disable_data_sync=0 \
       $params_w \
       --threads=$num_threads \
       --merge_operator=\"put\" \
       2>&1 | tee -a $output_dir/${out_name}"
  echo $cmd | tee $output_dir/${out_name}
  eval $cmd
  summarize_result $output_dir/${out_name} ${operation}.t${num_threads}.s${syncval} $operation
}
# Load the database with $num_keys unique keys inserted in random order.
function run_filluniquerandom {
  echo "Loading $num_keys unique keys randomly"
  cmd="./db_bench --benchmarks=filluniquerandom \
       --use_existing_db=0 \
       --num=$num_keys \
       --sync=0 \
       --disable_data_sync=0 \
       $params_w \
       --threads=1 \
       2>&1 | tee -a $output_dir/benchmark_filluniquerandom.log"
  echo $cmd | tee $output_dir/benchmark_filluniquerandom.log
  eval $cmd
  summarize_result $output_dir/benchmark_filluniquerandom.log filluniquerandom filluniquerandom
}
# Point-lookup benchmark: $num_threads readers doing random gets on an
# existing database.
function run_readrandom {
  echo "Reading $num_keys random keys"
  out_name="benchmark_readrandom.t${num_threads}.log"
  cmd="./db_bench --benchmarks=readrandom \
       --use_existing_db=1 \
       --num=$num_keys \
       --sync=$syncval \
       --disable_data_sync=0 \
       $params_w \
       --threads=$num_threads \
       2>&1 | tee -a $output_dir/${out_name}"
  echo $cmd | tee $output_dir/${out_name}
  eval $cmd
  summarize_result $output_dir/${out_name} readrandom.t${num_threads} readrandom
}
# Mixed workload: random reads while a background writer issues puts
# ($1 = writing) or merges ($1 = merging) at $writes_per_second.
function run_readwhile {
  operation=$1
  echo "Reading $num_keys random keys while $operation"
  out_name="benchmark_readwhile${operation}.t${num_threads}.log"
  cmd="./db_bench --benchmarks=readwhile${operation} \
       --use_existing_db=1 \
       --num=$num_keys \
       --sync=$syncval \
       --disable_data_sync=0 \
       $params_w \
       --threads=$num_threads \
       --writes_per_second=$writes_per_second \
       --merge_operator=\"put\" \
       2>&1 | tee -a $output_dir/${out_name}"
  echo $cmd | tee $output_dir/${out_name}
  eval $cmd
  summarize_result $output_dir/${out_name} readwhile${operation}.t${num_threads} readwhile${operation}
}
# Range-scan benchmark with a concurrent background writer.
#   $1 - writing|merging, $2 - report/log label (e.g. fwdrangewhilewriting),
#   $3 - true|false for --reverse_iterator.
function run_rangewhile {
  operation=$1
  full_name=$2
  reverse_arg=$3
  out_name="benchmark_${full_name}.t${num_threads}.log"
  echo "Range scan $num_keys random keys while ${operation} for reverse_iter=${reverse_arg}"
  cmd="./db_bench --benchmarks=seekrandomwhile${operation} \
       --use_existing_db=1 \
       --num=$num_keys \
       --sync=$syncval \
       --disable_data_sync=0 \
       $params_w \
       --threads=$num_threads \
       --writes_per_second=$writes_per_second \
       --merge_operator=\"put\" \
       --seek_nexts=$num_nexts_per_seek \
       --reverse_iterator=$reverse_arg \
       2>&1 | tee -a $output_dir/${out_name}"
  echo $cmd | tee $output_dir/${out_name}
  eval $cmd
  summarize_result $output_dir/${out_name} ${full_name}.t${num_threads} seekrandomwhile${operation}
}
# Pure range-scan benchmark (no concurrent writer).
#   $1 - report/log label (fwdrange|revrange), $2 - true|false for
#   --reverse_iterator.
function run_range {
  full_name=$1
  reverse_arg=$2
  out_name="benchmark_${full_name}.t${num_threads}.log"
  echo "Range scan $num_keys random keys for reverse_iter=${reverse_arg}"
  cmd="./db_bench --benchmarks=seekrandom \
       --use_existing_db=1 \
       --num=$num_keys \
       --sync=$syncval \
       --disable_data_sync=0 \
       $params_w \
       --threads=$num_threads \
       --seek_nexts=$num_nexts_per_seek \
       --reverse_iterator=$reverse_arg \
       2>&1 | tee -a $output_dir/${out_name}"
  echo $cmd | tee $output_dir/${out_name}
  eval $cmd
  summarize_result $output_dir/${out_name} ${full_name}.t${num_threads} seekrandom
}
function now( ) {
@ -241,6 +260,7 @@ function now() {
}
# report.txt collects the per-test summary rows; schedule.txt records
# start/finish timestamps for each job.
report="$output_dir/report.txt"
schedule="$output_dir/schedule.txt"

echo "===== Benchmark ====="
@ -249,7 +269,7 @@ IFS=',' read -a jobs <<< $1
# Run each requested job in order, timing it; "debug" is a meta-job that
# only shrinks num_keys, so it is not logged to the schedule.
for job in ${jobs[@]}; do

  if [ $job != debug ]; then
    echo "Start $job at `date`" | tee -a $schedule
  fi

  start=$(now)
@ -258,21 +278,31 @@ for job in ${jobs[@]}; do
elif [ $job = fillseq ] ; then
run_fillseq
elif [ $job = overwrite ] ; then
run_overwrite
run_change overwrite
elif [ $job = updaterandom ] ; then
run_change updaterandom
elif [ $job = mergerandom ] ; then
run_change mergerandom
elif [ $job = filluniquerandom ] ; then
run_filluniquerandom
elif [ $job = readrandom ] ; then
run_readrandom
elif [ $job = fwdrange ] ; then
run_range $job false
elif [ $job = revrange ] ; then
run_range $job true
elif [ $job = readwhilewriting ] ; then
run_readwhilewriting
run_readwhile writing
elif [ $job = readwhilemerging ] ; then
run_readwhilemerging
elif [ $job = rangescanwhilewriting ] ; then
run_rangescanwhilewriting
elif [ $job = updaterandom ] ; then
run_updaterandom
elif [ $job = mergerandom ] ; then
run_mergerandom
run_readwhile merging
elif [ $job = fwdrangewhilewriting ] ; then
run_rangewhile writing $job false
elif [ $job = revrangewhilewriting ] ; then
run_rangewhile writing $job true
elif [ $job = fwdrangewhilemerging ] ; then
run_rangewhile merging $job false
elif [ $job = revrangewhilemerging ] ; then
run_rangewhile merging $job true
elif [ $job = debug ] ; then
num_keys = 1000; # debug
echo " Setting num_keys to $num_keys "
@ -283,24 +313,10 @@ for job in ${jobs[@]}; do
end = $( now)
if [ $job != debug ] ; then
echo " Complete $job in $(( end-start)) seconds " | tee -a $report
echo " Complete $job in $(( end-start)) seconds " | tee -a $schedule
fi
if [ [ $job = = readrandom || $job = = readwhilewriting \
|| $job = = rangescanwhilewriting || $job = = updaterandom \
|| $job = = mergerandom || $job = = readwhilemerging ] ] ; then
lat = $( grep "micros\/op" " $output_dir /benchmark_ $job .log " \
| grep "ops\/sec" | awk '{print $3}' )
qps = $( grep "micros\/op" " $output_dir /benchmark_ $job .log " \
| grep "ops\/sec" | awk '{print $5}' )
line = $( grep "rocksdb.db.get.micros" " $output_dir /benchmark_ $job .log " )
p50 = $( echo $line | awk '{print $7}' )
p99 = $( echo $line | awk '{print $13}' )
print_percentile = $( echo " $p50 != 0 || $p99 != 0 " | bc) ;
if [ " $print_percentile " = = "1" ] ; then
echo " Read latency p50 = $p50 us, p99 = $p99 us " | tee -a $report
fi
echo " QPS = $qps ops/sec " | tee -a $report
echo " Avg Latency = $lat micros/op " | tee -a $report
fi
echo -e "ops/sec\tmb/sec\tL0_MB\tSum_GB\tW-Amp\tW-MB/s\tusec/op\tp50\tp75\tp99\tUptime\tStall-time\tStall%\tTest"
tail -1 $output_dir /report.txt
done