@@ -35,21 +35,24 @@ if [ ! -z $DB_BENCH_NO_SYNC ]; then
   syncval="0";
 fi
 
-num_read_threads=${NUM_READ_THREADS:-16}
-# Only for readwhilewriting, readwhilemerging
-writes_per_second=${WRITES_PER_SEC:-$((80 * K))}
-num_nexts_per_seek=${NUM_NEXTS_PER_SEEK:-10} # (only for rangescanwhilewriting)
-cache_size=$((1 * G))
+num_threads=${NUM_THREADS:-16}
+# Only for *whilewriting, *whilemerging
+writes_per_second=${WRITES_PER_SECOND:-$((10 * K))}
+# Only for tests that do range scans
+num_nexts_per_seek=${NUM_NEXTS_PER_SEEK:-10}
+cache_size=${CACHE_SIZE:-$((1 * G))}
 duration=${DURATION:-0}
 
 num_keys=${NUM_KEYS:-$((1 * G))}
 key_size=20
-value_size=800
+value_size=${VALUE_SIZE:-400}
 
 const_params="
   --db=$DB_DIR \
   --wal_dir=$WAL_DIR \
+  --disable_data_sync=0 \
   \
+  --num=$num_keys \
   --num_levels=6 \
   --key_size=$key_size \
   --value_size=$value_size \
@@ -57,10 +60,10 @@ const_params="
   --cache_size=$cache_size \
   --cache_numshardbits=6 \
   --compression_type=zlib \
-  --min_level_to_compress=2 \
+  --min_level_to_compress=3 \
   --compression_ratio=0.5 \
   \
-  --hard_rate_limit=2 \
+  --hard_rate_limit=3 \
   --rate_limit_delay_max_milliseconds=1000000 \
   --write_buffer_size=$((128 * M)) \
   --max_write_buffer_number=3 \
@@ -69,7 +72,8 @@ const_params="
   \
   --verify_checksum=1 \
   --delete_obsolete_files_period_micros=$((60 * M)) \
-  --max_grandparent_overlap_factor=10 \
+  --max_grandparent_overlap_factor=8 \
+  --max_bytes_for_level_multiplier=8 \
   \
   --statistics=1 \
   --stats_per_interval=1 \
@@ -82,158 +86,173 @@ const_params="
 l0_config="
   --level0_file_num_compaction_trigger=4 \
-  --level0_slowdown_writes_trigger=8 \
-  --level0_stop_writes_trigger=12"
+  --level0_slowdown_writes_trigger=12 \
+  --level0_stop_writes_trigger=20"
 
 if [ $duration -gt 0 ]; then
   const_params="$const_params --duration=$duration"
 fi
 
-params_r="$const_params $l0_config --max_background_compactions=4 --max_background_flushes=1"
 params_w="$const_params $l0_config --max_background_compactions=16 --max_background_flushes=16"
 params_bulkload="$const_params --max_background_compactions=16 --max_background_flushes=16 \
   --level0_file_num_compaction_trigger=$((10 * M)) \
   --level0_slowdown_writes_trigger=$((10 * M)) \
   --level0_stop_writes_trigger=$((10 * M))"
 
+function summarize_result {
+  test_out=$1
+  test_name=$2
+  bench_name=$3
+
+  uptime=$( grep ^Uptime\(secs $test_out | tail -1 | awk '{ printf "%.0f", $2 }' )
+  stall_time=$( grep "^Cumulative stall" $test_out | tail -1 | awk '{ print $3 }' )
+  stall_pct=$( grep "^Cumulative stall" $test_out | tail -1 | awk '{ print $5 }' )
+  ops_sec=$( grep ^${bench_name} $test_out | awk '{ print $5 }' )
+  mb_sec=$( grep ^${bench_name} $test_out | awk '{ print $7 }' )
+  lo_wgb=$( grep "^ L0" $test_out | tail -1 | awk '{ print $8 }' )
+  sum_wgb=$( grep "^ Sum" $test_out | tail -1 | awk '{ print $8 }' )
+  wamp=$( echo "scale=1; $sum_wgb / $lo_wgb" | bc )
+  wmb_ps=$( echo "scale=1; ( $sum_wgb * 1024.0 ) / $uptime" | bc )
+  usecs_op=$( grep ^${bench_name} $test_out | awk '{ printf "%.1f", $3 }' )
+  p50=$( grep "^Percentiles:" $test_out | awk '{ printf "%.1f", $3 }' )
+  p75=$( grep "^Percentiles:" $test_out | awk '{ printf "%.1f", $5 }' )
+  p99=$( grep "^Percentiles:" $test_out | awk '{ printf "%.1f", $7 }' )
+  echo -e "$ops_sec\t$mb_sec\t$lo_wgb\t$sum_wgb\t$wamp\t$wmb_ps\t$usecs_op\t$p50\t$p75\t$p99\t$uptime\t$stall_time\t$stall_pct\t$test_name" \
+    >> $output_dir/report.txt
+}
+
 function run_bulkload {
-  echo "Bulk loading $num_keys random keys into database..."
-  cmd="./db_bench $params_bulkload --benchmarks=fillrandom \
+  echo "Bulk loading $num_keys random keys"
+  cmd="./db_bench --benchmarks=fillrandom \
        --use_existing_db=0 \
-       --num=$num_keys \
        --disable_auto_compactions=1 \
        --sync=0 \
-       --disable_data_sync=0 \
-       --threads=1 2>&1 | tee -a $output_dir/benchmark_bulkload_fillrandom.log"
+       $params_bulkload \
+       --threads=1 \
+       2>&1 | tee -a $output_dir/benchmark_bulkload_fillrandom.log"
   echo $cmd | tee $output_dir/benchmark_bulkload_fillrandom.log
   eval $cmd
+  summarize_result $output_dir/benchmark_bulkload_fillrandom.log bulkload fillrandom
   echo "Compacting..."
-  cmd="./db_bench $params_w --benchmarks=compact \
+  cmd="./db_bench --benchmarks=compact \
        --use_existing_db=1 \
-       --num=$num_keys \
        --disable_auto_compactions=1 \
        --sync=0 \
-       --disable_data_sync=0 \
-       --threads=1 2>&1 | tee -a $output_dir/benchmark_bulkload_compact.log"
+       $params_w \
+       --threads=1 \
+       2>&1 | tee -a $output_dir/benchmark_bulkload_compact.log"
   echo $cmd | tee $output_dir/benchmark_bulkload_compact.log
   eval $cmd
 }
 
 function run_fillseq {
-  echo "Loading $num_keys keys sequentially into database..."
-  cmd="./db_bench $params_w --benchmarks=fillseq \
+  echo "Loading $num_keys keys sequentially"
+  cmd="./db_bench --benchmarks=fillseq \
        --use_existing_db=0 \
-       --num=$num_keys \
        --sync=0 \
-       --disable_data_sync=0 \
-       --threads=1 2>&1 | tee -a $output_dir/benchmark_fillseq.log"
+       $params_w \
+       --threads=1 \
+       2>&1 | tee -a $output_dir/benchmark_fillseq.log"
   echo $cmd | tee $output_dir/benchmark_fillseq.log
   eval $cmd
+  summarize_result $output_dir/benchmark_fillseq.log fillseq fillseq
 }
 
-function run_overwrite {
-  echo "Loading $num_keys keys sequentially into database..."
-  cmd="./db_bench $params_w --benchmarks=overwrite \
+function run_change {
+  operation=$1
+  echo "Do $num_keys random $operation"
+  out_name="benchmark_${operation}.t${num_threads}.s${syncval}.log"
+  cmd="./db_bench --benchmarks=$operation \
        --use_existing_db=1 \
-       --num=$num_keys \
-       --sync=0 \
-       --disable_data_sync=0 \
-       --threads=1 2>&1 | tee -a $output_dir/benchmark_overwrite.log"
-  echo $cmd | tee $output_dir/benchmark_overwrite.log
+       --sync=$syncval \
+       $params_w \
+       --threads=$num_threads \
+       --merge_operator=\"put\" \
+       2>&1 | tee -a $output_dir/${out_name}"
+  echo $cmd | tee $output_dir/${out_name}
   eval $cmd
+  summarize_result $output_dir/${out_name} ${operation}.t${num_threads}.s${syncval} $operation
 }
 
 function run_filluniquerandom {
-  echo "Loading $num_keys unique keys randomly into database..."
-  cmd="./db_bench $params_w --benchmarks=filluniquerandom \
+  echo "Loading $num_keys unique keys randomly"
+  cmd="./db_bench --benchmarks=filluniquerandom \
        --use_existing_db=0 \
-       --num=$num_keys \
        --sync=0 \
-       --disable_data_sync=0 \
-       --threads=1 2>&1 | tee -a $output_dir/benchmark_filluniquerandom.log"
+       $params_w \
+       --threads=1 \
+       2>&1 | tee -a $output_dir/benchmark_filluniquerandom.log"
  echo $cmd | tee $output_dir/benchmark_filluniquerandom.log
  eval $cmd
+  summarize_result $output_dir/benchmark_filluniquerandom.log filluniquerandom filluniquerandom
 }
 
 function run_readrandom {
-  echo "Reading $num_keys random keys from database..."
-  cmd="./db_bench $params_r --benchmarks=readrandom \
+  echo "Reading $num_keys random keys"
+  out_name="benchmark_readrandom.t${num_threads}.log"
+  cmd="./db_bench --benchmarks=readrandom \
        --use_existing_db=1 \
-       --num=$num_keys \
-       --threads=$num_read_threads \
-       --disable_auto_compactions=1 \
-       2>&1 | tee -a $output_dir/benchmark_readrandom.log"
-  echo $cmd | tee $output_dir/benchmark_readrandom.log
+       $params_w \
+       --threads=$num_threads \
+       2>&1 | tee -a $output_dir/${out_name}"
+  echo $cmd | tee $output_dir/${out_name}
   eval $cmd
+  summarize_result $output_dir/${out_name} readrandom.t${num_threads} readrandom
 }
 
-function run_readwhilewriting {
-  echo "Reading $num_keys random keys from database while writing.."
-  cmd="./db_bench $params_r --benchmarks=readwhilewriting \
-       --use_existing_db=1 \
-       --num=$num_keys \
-       --sync=$syncval \
-       --disable_data_sync=0 \
-       --threads=$num_read_threads \
-       --writes_per_second=$writes_per_second \
-       2>&1 | tee -a $output_dir/benchmark_readwhilewriting.log"
-  echo $cmd | tee $output_dir/benchmark_readwhilewriting.log
-  eval $cmd
-}
-
-function run_readwhilemerging {
-  echo "Reading $num_keys random keys from database while merging.."
-  cmd="./db_bench $params_r --benchmarks=readwhilemerging \
+function run_readwhile {
+  operation=$1
+  echo "Reading $num_keys random keys while $operation"
+  out_name="benchmark_readwhile${operation}.t${num_threads}.log"
+  cmd="./db_bench --benchmarks=readwhile${operation} \
        --use_existing_db=1 \
-       --num=$num_keys \
        --sync=$syncval \
-       --disable_data_sync=0 \
-       --threads=$num_read_threads \
+       $params_w \
+       --threads=$num_threads \
        --writes_per_second=$writes_per_second \
        --merge_operator=\"put\" \
-       2>&1 | tee -a $output_dir/benchmark_readwhilemerging.log"
-  echo $cmd | tee $output_dir/benchmark_readwhilemerging.log
+       2>&1 | tee -a $output_dir/${out_name}"
+  echo $cmd | tee $output_dir/${out_name}
   eval $cmd
+  summarize_result $output_dir/${out_name} readwhile${operation}.t${num_threads} readwhile${operation}
 }
 
-function run_rangescanwhilewriting {
-  echo "Range scan $num_keys random keys from database while writing.."
-  cmd="./db_bench $params_r --benchmarks=seekrandomwhilewriting \
+function run_rangewhile {
+  operation=$1
+  full_name=$2
+  reverse_arg=$3
+  out_name="benchmark_${full_name}.t${num_threads}.log"
+  echo "Range scan $num_keys random keys while ${operation} for reverse_iter=${reverse_arg}"
+  cmd="./db_bench --benchmarks=seekrandomwhile${operation} \
        --use_existing_db=1 \
-       --num=$num_keys \
        --sync=$syncval \
-       --disable_data_sync=0 \
-       --threads=$num_read_threads \
+       $params_w \
+       --threads=$num_threads \
        --writes_per_second=$writes_per_second \
+       --merge_operator=\"put\" \
        --seek_nexts=$num_nexts_per_seek \
-       2>&1 | tee -a $output_dir/benchmark_rangescanwhilewriting.log"
-  echo $cmd | tee $output_dir/benchmark_rangescanwhilewriting.log
+       --reverse_iterator=$reverse_arg \
+       2>&1 | tee -a $output_dir/${out_name}"
+  echo $cmd | tee $output_dir/${out_name}
   eval $cmd
+  summarize_result $output_dir/${out_name} ${full_name}.t${num_threads} seekrandomwhile${operation}
 }
 
-function run_updaterandom {
-  echo "Read/Modify/Write $num_keys random keys (not using merge).."
-  cmd="./db_bench $params_w --benchmarks=updaterandom \
-       --use_existing_db=1 \
-       --num=$num_keys \
-       --sync=$syncval \
-       --disable_data_sync=0 \
-       --threads=$num_read_threads 2>&1 | tee -a $output_dir/benchmark_updaterandom.log"
-  echo $cmd | tee $output_dir/benchmark_updaterandom.log
-  eval $cmd
-}
-
-function run_mergerandom {
-  echo "Read/Modify/Write $num_keys random keys (using merge operator).."
-  cmd="./db_bench $params_w --benchmarks=mergerandom \
+function run_range {
+  full_name=$1
+  reverse_arg=$2
+  out_name="benchmark_${full_name}.t${num_threads}.log"
+  echo "Range scan $num_keys random keys for reverse_iter=${reverse_arg}"
+  cmd="./db_bench --benchmarks=seekrandom \
        --use_existing_db=1 \
-       --num=$num_keys \
-       --sync=$syncval \
-       --disable_data_sync=0 \
-       --merge_operator=\"put\" \
-       --threads=$num_read_threads 2>&1 | tee -a $output_dir/benchmark_mergerandom.log"
-  echo $cmd | tee $output_dir/benchmark_mergerandom.log
+       $params_w \
+       --threads=$num_threads \
+       --seek_nexts=$num_nexts_per_seek \
+       --reverse_iterator=$reverse_arg \
+       2>&1 | tee -a $output_dir/${out_name}"
+  echo $cmd | tee $output_dir/${out_name}
   eval $cmd
+  summarize_result $output_dir/${out_name} ${full_name}.t${num_threads} seekrandom
 }
 
 function now() {
@@ -241,6 +260,7 @@ function now() {
 }
 
 report="$output_dir/report.txt"
+schedule="$output_dir/schedule.txt"
 
 echo "===== Benchmark ====="
@@ -249,7 +269,7 @@ IFS=',' read -a jobs <<< $1
 for job in ${jobs[@]}; do
 
   if [ $job != debug ]; then
-    echo "Start $job at `date`" | tee -a $report
+    echo "Start $job at `date`" | tee -a $schedule
   fi
 
   start=$(now)
@@ -258,21 +278,31 @@ for job in ${jobs[@]}; do
   elif [ $job = fillseq ]; then
     run_fillseq
   elif [ $job = overwrite ]; then
-    run_overwrite
+    run_change overwrite
+  elif [ $job = updaterandom ]; then
+    run_change updaterandom
+  elif [ $job = mergerandom ]; then
+    run_change mergerandom
   elif [ $job = filluniquerandom ]; then
     run_filluniquerandom
   elif [ $job = readrandom ]; then
     run_readrandom
+  elif [ $job = fwdrange ]; then
+    run_range $job false
+  elif [ $job = revrange ]; then
+    run_range $job true
   elif [ $job = readwhilewriting ]; then
-    run_readwhilewriting
+    run_readwhile writing
   elif [ $job = readwhilemerging ]; then
-    run_readwhilemerging
-  elif [ $job = rangescanwhilewriting ]; then
-    run_rangescanwhilewriting
-  elif [ $job = updaterandom ]; then
-    run_updaterandom
-  elif [ $job = mergerandom ]; then
-    run_mergerandom
+    run_readwhile merging
+  elif [ $job = fwdrangewhilewriting ]; then
+    run_rangewhile writing $job false
+  elif [ $job = revrangewhilewriting ]; then
+    run_rangewhile writing $job true
+  elif [ $job = fwdrangewhilemerging ]; then
+    run_rangewhile merging $job false
+  elif [ $job = revrangewhilemerging ]; then
+    run_rangewhile merging $job true
   elif [ $job = debug ]; then
     num_keys=1000; # debug
     echo "Setting num_keys to $num_keys"
@@ -283,24 +313,10 @@ for job in ${jobs[@]}; do
   end=$(now)
 
   if [ $job != debug ]; then
-    echo "Complete $job in $((end-start)) seconds" | tee -a $report
+    echo "Complete $job in $((end-start)) seconds" | tee -a $schedule
   fi
 
-  if [[ $job == readrandom || $job == readwhilewriting \
-        || $job == rangescanwhilewriting || $job == updaterandom \
-        || $job == mergerandom || $job == readwhilemerging ]]; then
-    lat=$(grep "micros\/op" "$output_dir/benchmark_$job.log" \
-      | grep "ops\/sec" | awk '{print $3}')
-    qps=$(grep "micros\/op" "$output_dir/benchmark_$job.log" \
-      | grep "ops\/sec" | awk '{print $5}')
-    line=$(grep "rocksdb.db.get.micros" "$output_dir/benchmark_$job.log")
-    p50=$(echo $line | awk '{print $7}')
-    p99=$(echo $line | awk '{print $13}')
-    print_percentile=$(echo "$p50 != 0 || $p99 != 0" | bc);
-    if [ "$print_percentile" == "1" ]; then
-      echo "Read latency p50 = $p50 us, p99 = $p99 us" | tee -a $report
-    fi
-    echo "QPS = $qps ops/sec" | tee -a $report
-    echo "Avg Latency = $lat micros/op" | tee -a $report
-  fi
+  echo -e "ops/sec\tmb/sec\tL0_MB\tSum_GB\tW-Amp\tW-MB/s\tusec/op\tp50\tp75\tp99\tUptime\tStall-time\tStall%\tTest"
+  tail -1 $output_dir/report.txt
+
 done
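
For context, a minimal sketch of how the revised script might be driven, assuming db_bench sits in the working directory and that DB_DIR, WAL_DIR, and the output directory are exported as the rest of the script expects (those parts are outside this diff); the key counts, thread counts, and job list below are illustrative only:

  # hypothetical run: sequential load, point reads, forward/reverse range scans,
  # then reads concurrent with writes (jobs are passed comma-separated in $1)
  NUM_KEYS=$((100 * 1000 * 1000)) NUM_THREADS=32 VALUE_SIZE=400 \
    ./benchmark.sh fillseq,readrandom,fwdrange,revrange,readwhilewriting

With this change each job appends one tab-separated line to $output_dir/report.txt via summarize_result (the column header and `tail -1` at the end of the loop echo that line per job), while the per-job start/finish messages go to $output_dir/schedule.txt instead of the report.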