Add micro-benchmark support (#8493)

Summary:
Add Google Benchmark as the framework for micro-benchmarks.
Add ribbon_bench to benchmark the Ribbon filter against the other filter implementations.

Pull Request resolved: https://github.com/facebook/rocksdb/pull/8493

Test Plan:
Added a CI job that builds and runs the micro-benchmarks.
To run the benchmark on devhost:
Install benchmark: `$ sudo dnf install google-benchmark-devel`
Build and run:
`$ ROCKSDB_NO_FBCODE=1 DEBUG_LEVEL=0 make microbench`
or with cmake:
`$ mkdir build && cd build && cmake .. -DCMAKE_BUILD_TYPE=Release -DWITH_BENCHMARK=1 && make microbench`
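Once built, an individual benchmark binary can also be invoked directly, and the stock Google Benchmark command-line flags apply. For example (a usage sketch rather than part of this change; the binary's location depends on which build system was used):
`$ ./ribbon_bench --benchmark_filter=FilterBuild`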

Reviewed By: pdillinger

Differential Revision: D29589649

Pulled By: jay-zhuang

fbshipit-source-id: 8fed13b562bef4472f161ecacec1ab6b18911dff
Branch: main
Author: Jay Zhuang (committed by Facebook GitHub Bot)
Parent: f127d459ad
Commit: 5dd18a8d8e
Changed files (lines changed in parentheses):
1. .circleci/config.yml (24)
2. CMakeLists.txt (5)
3. Makefile (12)
4. build_tools/build_detect_platform (10)
5. microbench/CMakeLists.txt (16)
6. microbench/ribbon_bench.cc (156)
7. src.mk (2)

.circleci/config.yml
@@ -91,6 +91,13 @@ commands:
command: |
sudo apt-get update -y && sudo apt-get install -y libgflags-dev
install-benchmark:
steps:
- run: # currently doesn't support ubuntu-1604, which doesn't have a libbenchmark package; users can still install it by building it themselves
name: Install benchmark
command: |
sudo apt-get update -y && sudo apt-get install -y libbenchmark-dev
upgrade-cmake:
steps:
- run:
@@ -317,7 +324,8 @@ jobs:
steps:
- checkout # check out the code in the project directory
- install-gflags
- run: (mkdir build && cd build && cmake -DWITH_GFLAGS=1 .. && make V=1 -j20 && ctest -j20) | .circleci/cat_ignore_eagain
- install-benchmark
- run: (mkdir build && cd build && cmake -DWITH_GFLAGS=1 -DWITH_BENCHMARK=1 .. && make V=1 -j20 && ctest -j20 && make microbench) | .circleci/cat_ignore_eagain
- post-steps
build-linux-unity:
@@ -370,6 +378,17 @@ jobs:
- run: CC=gcc-10 CXX=g++-10 V=1 SKIP_LINK=1 ROCKSDB_CXX_STANDARD=c++20 make -j16 all | .circleci/cat_ignore_eagain # Linking broken because libgflags compiled with newer ABI
- post-steps
# This job is only to make sure the microbench tests are able to run, the benchmark result is not meaningful as the CI host is changing.
build-linux-microbench:
machine:
image: ubuntu-2004:202010-01
resource_class: xlarge
steps:
- pre-steps
- install-benchmark
- run: DEBUG_LEVEL=0 make microbench | .circleci/cat_ignore_eagain
- post-steps
build-windows:
executor: windows-2xlarge
parameters:
@@ -778,6 +797,9 @@ workflows:
build-linux-arm:
jobs:
- build-linux-arm
build-microbench:
jobs:
- build-linux-microbench
nightly:
triggers:
- schedule:

CMakeLists.txt
@@ -1422,3 +1422,8 @@ option(WITH_EXAMPLES "build with examples" OFF)
if(WITH_EXAMPLES)
add_subdirectory(examples)
endif()
option(WITH_BENCHMARK "build benchmark tests" OFF)
if(WITH_BENCHMARK)
add_subdirectory(${PROJECT_SOURCE_DIR}/microbench/)
endif()

Makefile
@@ -505,7 +505,7 @@ STRESS_OBJECTS = $(patsubst %.cc, $(OBJ_DIR)/%.o, $(STRESS_LIB_SOURCES))
# Exclude build_version.cc -- a generated source file -- from all sources. Not needed for dependencies
ALL_SOURCES = $(filter-out util/build_version.cc, $(LIB_SOURCES)) $(TEST_LIB_SOURCES) $(MOCK_LIB_SOURCES) $(GTEST_DIR)/gtest/gtest-all.cc
ALL_SOURCES += $(TOOL_LIB_SOURCES) $(BENCH_LIB_SOURCES) $(CACHE_BENCH_LIB_SOURCES) $(ANALYZER_LIB_SOURCES) $(STRESS_LIB_SOURCES)
ALL_SOURCES += $(TEST_MAIN_SOURCES) $(TOOL_MAIN_SOURCES) $(BENCH_MAIN_SOURCES)
ALL_SOURCES += $(TEST_MAIN_SOURCES) $(TOOL_MAIN_SOURCES) $(BENCH_MAIN_SOURCES) $(MICROBENCH_SOURCES)
TESTS = $(patsubst %.cc, %, $(notdir $(TEST_MAIN_SOURCES)))
TESTS += $(patsubst %.c, %, $(notdir $(TEST_MAIN_SOURCES_C)))
@@ -601,6 +601,8 @@ TEST_LIBS = \
# TODO: add back forward_iterator_bench, after making it build in all environments.
BENCHMARKS = $(patsubst %.cc, %, $(notdir $(BENCH_MAIN_SOURCES)))
MICROBENCHS = $(patsubst %.cc, %, $(notdir $(MICROBENCH_SOURCES)))
# if user didn't config LIBNAME, set the default
ifeq ($(LIBNAME),)
LIBNAME=librocksdb
@@ -739,6 +741,9 @@ test_libs: $(TEST_LIBS)
benchmarks: $(BENCHMARKS)
microbench: $(MICROBENCHS)
for t in $(MICROBENCHS); do echo "===== Running benchmark $$t (`date`)"; ./$$t || exit 1; done;
dbg: $(LIBRARY) $(BENCHMARKS) tools $(TESTS)
# creates library and programs
@@ -1176,7 +1181,7 @@ clean-not-downloaded: clean-ext-libraries-bin clean-rocks clean-not-downloaded-r
clean-rocks:
echo shared=$(ALL_SHARED_LIBS)
echo static=$(ALL_STATIC_LIBS)
rm -f $(BENCHMARKS) $(TOOLS) $(TESTS) $(PARALLEL_TEST) $(ALL_STATIC_LIBS) $(ALL_SHARED_LIBS)
rm -f $(BENCHMARKS) $(TOOLS) $(TESTS) $(PARALLEL_TEST) $(ALL_STATIC_LIBS) $(ALL_SHARED_LIBS) $(MICROBENCHS)
rm -rf $(CLEAN_FILES) ios-x86 ios-arm scan_build_report
$(FIND) . -name "*.[oda]" -exec rm -f {} \;
$(FIND) . -type f -regex ".*\.\(\(gcda\)\|\(gcno\)\)" -exec rm -f {} \;
@@ -1890,6 +1895,9 @@ db_write_buffer_manager_test: $(OBJ_DIR)/db/db_write_buffer_manager_test.o $(TES
clipping_iterator_test: $(OBJ_DIR)/db/compaction/clipping_iterator_test.o $(TEST_LIBRARY) $(LIBRARY)
$(AM_LINK)
ribbon_bench: $(OBJ_DIR)/microbench/ribbon_bench.o $(LIBRARY)
$(AM_LINK)
#-------------------------------------------------
# make install related stuff
PREFIX ?= /usr/local

build_tools/build_detect_platform
@@ -596,6 +596,16 @@ EOF
PLATFORM_CXXFLAGS="$PLATFORM_CXXFLAGS -faligned-new -DHAVE_ALIGNED_NEW"
fi
fi
if ! test $ROCKSDB_DISABLE_BENCHMARK; then
# Test whether google benchmark is available
$CXX $PLATFORM_CXXFLAGS -x c++ - -o /dev/null -lbenchmark 2>/dev/null <<EOF
#include <benchmark/benchmark.h>
int main() {}
EOF
if [ "$?" = 0 ]; then
PLATFORM_LDFLAGS="$PLATFORM_LDFLAGS -lbenchmark"
fi
fi
fi
# TODO(tec): Fix -Wshorten-64-to-32 errors on FreeBSD and enable the warning.

microbench/CMakeLists.txt
@@ -0,0 +1,16 @@
find_package(benchmark REQUIRED)
find_package(Threads REQUIRED)
file(GLOB_RECURSE ALL_BENCH_CPP *.cc)
foreach(ONE_BENCH_CPP ${ALL_BENCH_CPP})
get_filename_component(TARGET_NAME ${ONE_BENCH_CPP} NAME_WE)
add_executable(${TARGET_NAME} ${ONE_BENCH_CPP})
target_link_libraries(${TARGET_NAME} ${ROCKSDB_LIB} benchmark::benchmark
${CMAKE_THREAD_LIBS_INIT})
# run each benchmark like a test; if added, the benchmark tests could be run with `ctest -R Bench_`
# add_test(Bench_${TARGET_NAME} ${TARGET_NAME})
list(APPEND ALL_BENCH_TARGETS ${TARGET_NAME})
endforeach()
add_custom_target(microbench
COMMAND for t in ${ALL_BENCH_TARGETS}\; do \.\/$$t \|\| exit 1\; done
DEPENDS ${ALL_BENCH_TARGETS})

microbench/ribbon_bench.cc
@@ -0,0 +1,156 @@
// Copyright (c) 2011-present, Facebook, Inc. All rights reserved.
// This source code is licensed under both the GPLv2 (found in the
// COPYING file in the root directory) and Apache 2.0 License
// (found in the LICENSE.Apache file in the root directory).
// this is a simple micro-benchmark for comparing the ribbon filter vs. other filters;
// for a more comprehensive benchmark, please check the dedicated util/filter_bench.
#include <benchmark/benchmark.h>
#include "table/block_based/filter_policy_internal.h"
#include "table/block_based/mock_block_based_table.h"
namespace ROCKSDB_NAMESPACE {
struct KeyMaker {
explicit KeyMaker(size_t avg_size)
: smallest_size_(avg_size),
buf_size_(avg_size + 11), // pad to vary key size and alignment
buf_(new char[buf_size_]) {
memset(buf_.get(), 0, buf_size_);
assert(smallest_size_ > 8);
}
size_t smallest_size_;
size_t buf_size_;
std::unique_ptr<char[]> buf_;
// Returns a unique(-ish) key based on the given parameter values. Each
// call returns a Slice from the same buffer so previously returned
// Slices should be considered invalidated.
Slice Get(uint32_t filter_num, uint32_t val_num) const {
size_t start = val_num % 4;
size_t len = smallest_size_;
// To get range [avg_size - 2, avg_size + 2]
// use range [smallest_size, smallest_size + 4]
len += FastRange32((val_num >> 5) * 1234567891, 5);
char *data = buf_.get() + start;
// Populate key data such that all data makes it into a key of at
// least 8 bytes. We also don't want all the within-filter key
// variance confined to a contiguous 32 bits, because then a 32 bit
// hash function can "cheat" the false positive rate by
// approximating a perfect hash.
EncodeFixed32(data, val_num);
EncodeFixed32(data + 4, filter_num + val_num);
// ensure clearing leftovers from different alignment
EncodeFixed32(data + 8, 0);
return {data, len};
}
};
// benchmark arguments:
// 0. filter mode
// 1. filter config bits_per_key
// 2. average data key length
// 3. data entry number
static void CustomArguments(benchmark::internal::Benchmark *b) {
for (int filterMode :
{BloomFilterPolicy::kLegacyBloom, BloomFilterPolicy::kFastLocalBloom,
BloomFilterPolicy::kStandard128Ribbon}) {
// for (int bits_per_key : {4, 10, 20, 30}) {
for (int bits_per_key : {10, 20}) {
for (int key_len_avg : {10, 100}) {
for (int64_t entry_num : {1 << 10, 1 << 20}) {
b->Args({filterMode, bits_per_key, key_len_avg, entry_num});
}
}
}
}
}
static void FilterBuild(benchmark::State &state) {
// setup data
auto filter = new BloomFilterPolicy(
static_cast<double>(state.range(1)),
static_cast<BloomFilterPolicy::Mode>(state.range(0)));
auto tester = new mock::MockBlockBasedTableTester(filter);
KeyMaker km(state.range(2));
std::unique_ptr<const char[]> owner;
const int64_t kEntryNum = state.range(3);
auto rnd = Random32(12345);
uint32_t filter_num = rnd.Next();
// run the test
for (auto _ : state) {
std::unique_ptr<FilterBitsBuilder> builder(tester->GetBuilder());
for (uint32_t i = 0; i < kEntryNum; i++) {
builder->AddKey(km.Get(filter_num, i));
}
auto ret = builder->Finish(&owner);
state.counters["size"] = static_cast<double>(ret.size());
}
}
BENCHMARK(FilterBuild)->Apply(CustomArguments);
static void FilterQueryPositive(benchmark::State &state) {
// setup data
auto filter = new BloomFilterPolicy(
static_cast<double>(state.range(1)),
static_cast<BloomFilterPolicy::Mode>(state.range(0)));
auto tester = new mock::MockBlockBasedTableTester(filter);
KeyMaker km(state.range(2));
std::unique_ptr<const char[]> owner;
const int64_t kEntryNum = state.range(3);
auto rnd = Random32(12345);
uint32_t filter_num = rnd.Next();
std::unique_ptr<FilterBitsBuilder> builder(tester->GetBuilder());
for (uint32_t i = 0; i < kEntryNum; i++) {
builder->AddKey(km.Get(filter_num, i));
}
auto data = builder->Finish(&owner);
auto reader = filter->GetFilterBitsReader(data);
// run test
uint32_t i = 0;
for (auto _ : state) {
i++;
i = i % kEntryNum;
reader->MayMatch(km.Get(filter_num, i));
}
}
BENCHMARK(FilterQueryPositive)->Apply(CustomArguments);
static void FilterQueryNegative(benchmark::State &state) {
// setup data
auto filter = new BloomFilterPolicy(
static_cast<double>(state.range(1)),
static_cast<BloomFilterPolicy::Mode>(state.range(0)));
auto tester = new mock::MockBlockBasedTableTester(filter);
KeyMaker km(state.range(2));
std::unique_ptr<const char[]> owner;
const int64_t kEntryNum = state.range(3);
auto rnd = Random32(12345);
uint32_t filter_num = rnd.Next();
std::unique_ptr<FilterBitsBuilder> builder(tester->GetBuilder());
for (uint32_t i = 0; i < kEntryNum; i++) {
builder->AddKey(km.Get(filter_num, i));
}
auto data = builder->Finish(&owner);
auto reader = filter->GetFilterBitsReader(data);
// run test
uint32_t i = 0;
double fp_cnt = 0;
for (auto _ : state) {
i++;
auto result = reader->MayMatch(km.Get(filter_num + 1, i));
if (result) {
fp_cnt++;
}
}
state.counters["FP %"] =
benchmark::Counter(fp_cnt * 100, benchmark::Counter::kAvgIterations);
}
BENCHMARK(FilterQueryNegative)->Apply(CustomArguments);
} // namespace ROCKSDB_NAMESPACE
BENCHMARK_MAIN();

src.mk
@@ -566,6 +566,8 @@ TEST_MAIN_SOURCES = \
TEST_MAIN_SOURCES_C = \
db/c_test.c \
MICROBENCH_SOURCES = \
microbench/ribbon_bench.cc \
JNI_NATIVE_SOURCES = \
java/rocksjni/backupenginejni.cc \
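
For reference, adding another micro-benchmark under this setup only requires a new Google Benchmark translation unit in microbench/. The sketch below is hypothetical and not part of this commit (the file name example_bench.cc and the benchmarked snippet are made up); it only illustrates the registration pattern that ribbon_bench.cc uses:

```cpp
// microbench/example_bench.cc -- hypothetical example, not part of this commit
#include <benchmark/benchmark.h>

#include <string>

namespace {

// Measures std::string append cost, purely to show the Google Benchmark
// registration pattern used by the microbench/ targets.
void StringAppend(benchmark::State &state) {
  const size_t chunk = static_cast<size_t>(state.range(0));
  for (auto _ : state) {
    std::string s;
    s.append(chunk, 'x');
    // keep the compiler from optimizing the work away
    benchmark::DoNotOptimize(s.data());
  }
}
BENCHMARK(StringAppend)->Arg(64)->Arg(4096);

}  // namespace

BENCHMARK_MAIN();
```

With CMake, the GLOB_RECURSE in microbench/CMakeLists.txt above would pick the file up as its own example_bench target automatically; with the plain Makefile, the file would also have to be listed under MICROBENCH_SOURCES in src.mk, the same way ribbon_bench.cc is.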
