Split CompactionFilterWithValueChange

Summary:
The test currently times out when it is run under tsan. This patch splits it into 4 tests.
Closes https://github.com/facebook/rocksdb/pull/3047

Differential Revision: D6106515

Pulled By: maysamyabandeh

fbshipit-source-id: 03a28cdf8b1c097be2361b1b0cc3dc1acf2b5d63
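For readers skimming the diff below: the patch converts one long test that looped over compaction configurations into a GoogleTest value-parameterized test, so each configuration runs (and times out) as a separate test. The following is a minimal sketch of that pattern under illustrative names; OptionConfig, CompactParamTest, and the test body are stand-ins, not the actual RocksDB fixtures, which appear in db/db_compaction_filter_test.cc further down.

// Minimal sketch of the value-parameterized split (illustrative names only;
// link against gtest_main to run it).
#include <gtest/gtest.h>

enum OptionConfig { kDefault, kUniversalCompaction, kLevelSubcompactions };

class CompactParamTest : public ::testing::TestWithParam<OptionConfig> {
 protected:
  // The parameter is available before the test body runs, so the fixture can
  // set up the configuration in its constructor, as the real patch does.
  CompactParamTest() : option_config_(GetParam()) {}
  OptionConfig option_config_;
};

// One TEST_P body; gtest instantiates it once per value listed below, so each
// configuration is reported and timed independently instead of sharing a
// single long-running TEST_F loop.
TEST_P(CompactParamTest, CompactionFilterWithValueChange) {
  // ... exercise the database under option_config_ ...
  SUCCEED() << "ran config " << static_cast<int>(option_config_);
}

INSTANTIATE_TEST_CASE_P(CompactOption, CompactParamTest,
                        ::testing::Values(kDefault, kUniversalCompaction,
                                          kLevelSubcompactions));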
Branch: main
Author: Maysam Yabandeh (committed by Facebook GitHub Bot)
Parent: d75793d6b4
Commit: 3ef55d2c7c
Changed files:
  db/db_compaction_filter_test.cc (138 lines changed)
  db/db_iterator_test.cc (5 lines changed)

db/db_compaction_filter_test.cc
@@ -24,6 +24,36 @@ class DBTestCompactionFilter : public DBTestBase {
   DBTestCompactionFilter() : DBTestBase("/db_compaction_filter_test") {}
 };
 
+// Param variant of DBTestBase::ChangeCompactOptions
+class DBTestCompactionFilterWithCompactParam
+    : public DBTestCompactionFilter,
+      public ::testing::WithParamInterface<DBTestBase::OptionConfig> {
+ public:
+  DBTestCompactionFilterWithCompactParam() : DBTestCompactionFilter() {
+    option_config_ = GetParam();
+    Destroy(last_options_);
+    auto options = CurrentOptions();
+    if (option_config_ == kDefault || option_config_ == kUniversalCompaction ||
+        option_config_ == kUniversalCompactionMultiLevel) {
+      options.create_if_missing = true;
+    }
+    if (option_config_ == kLevelSubcompactions ||
+        option_config_ == kUniversalSubcompactions) {
+      assert(options.max_subcompactions > 1);
+    }
+    TryReopen(options);
+  }
+};
+
+INSTANTIATE_TEST_CASE_P(
+    DBTestCompactionFilterWithCompactOption,
+    DBTestCompactionFilterWithCompactParam,
+    ::testing::Values(DBTestBase::OptionConfig::kDefault,
+                      DBTestBase::OptionConfig::kUniversalCompaction,
+                      DBTestBase::OptionConfig::kUniversalCompactionMultiLevel,
+                      DBTestBase::OptionConfig::kLevelSubcompactions,
+                      DBTestBase::OptionConfig::kUniversalSubcompactions));
+
 class KeepFilter : public CompactionFilter {
  public:
   virtual bool Filter(int level, const Slice& key, const Slice& value,
@@ -439,65 +469,63 @@ TEST_F(DBTestCompactionFilter, CompactionFilterDeletesAll) {
 }
 #endif  // ROCKSDB_LITE
 
-TEST_F(DBTestCompactionFilter, CompactionFilterWithValueChange) {
-  do {
-    Options options = CurrentOptions();
-    options.num_levels = 3;
-    options.compaction_filter_factory =
-        std::make_shared<ChangeFilterFactory>();
-    CreateAndReopenWithCF({"pikachu"}, options);
-
-    // Write 100K+1 keys, these are written to a few files
-    // in L0. We do this so that the current snapshot points
-    // to the 100001 key.The compaction filter is not invoked
-    // on keys that are visible via a snapshot because we
-    // anyways cannot delete it.
-    const std::string value(10, 'x');
-    for (int i = 0; i < 100001; i++) {
-      char key[100];
-      snprintf(key, sizeof(key), "B%010d", i);
-      Put(1, key, value);
-    }
+TEST_P(DBTestCompactionFilterWithCompactParam,
+       CompactionFilterWithValueChange) {
+  Options options = CurrentOptions();
+  options.num_levels = 3;
+  options.compaction_filter_factory = std::make_shared<ChangeFilterFactory>();
+  CreateAndReopenWithCF({"pikachu"}, options);
 
-    // push all files to lower levels
-    ASSERT_OK(Flush(1));
-    if (option_config_ != kUniversalCompactionMultiLevel &&
-        option_config_ != kUniversalSubcompactions) {
-      dbfull()->TEST_CompactRange(0, nullptr, nullptr, handles_[1]);
-      dbfull()->TEST_CompactRange(1, nullptr, nullptr, handles_[1]);
-    } else {
-      dbfull()->CompactRange(CompactRangeOptions(), handles_[1], nullptr,
-                             nullptr);
-    }
+  // Write 100K+1 keys, these are written to a few files
+  // in L0. We do this so that the current snapshot points
+  // to the 100001 key.The compaction filter is not invoked
+  // on keys that are visible via a snapshot because we
+  // anyways cannot delete it.
+  const std::string value(10, 'x');
+  for (int i = 0; i < 100001; i++) {
+    char key[100];
+    snprintf(key, sizeof(key), "B%010d", i);
+    Put(1, key, value);
+  }
 
-    // re-write all data again
-    for (int i = 0; i < 100001; i++) {
-      char key[100];
-      snprintf(key, sizeof(key), "B%010d", i);
-      Put(1, key, value);
-    }
+  // push all files to lower levels
+  ASSERT_OK(Flush(1));
+  if (option_config_ != kUniversalCompactionMultiLevel &&
+      option_config_ != kUniversalSubcompactions) {
+    dbfull()->TEST_CompactRange(0, nullptr, nullptr, handles_[1]);
+    dbfull()->TEST_CompactRange(1, nullptr, nullptr, handles_[1]);
+  } else {
+    dbfull()->CompactRange(CompactRangeOptions(), handles_[1], nullptr,
+                           nullptr);
+  }
 
-    // push all files to lower levels. This should
-    // invoke the compaction filter for all 100000 keys.
-    ASSERT_OK(Flush(1));
-    if (option_config_ != kUniversalCompactionMultiLevel &&
-        option_config_ != kUniversalSubcompactions) {
-      dbfull()->TEST_CompactRange(0, nullptr, nullptr, handles_[1]);
-      dbfull()->TEST_CompactRange(1, nullptr, nullptr, handles_[1]);
-    } else {
-      dbfull()->CompactRange(CompactRangeOptions(), handles_[1], nullptr,
-                             nullptr);
-    }
+  // re-write all data again
+  for (int i = 0; i < 100001; i++) {
+    char key[100];
+    snprintf(key, sizeof(key), "B%010d", i);
+    Put(1, key, value);
+  }
 
-    // verify that all keys now have the new value that
-    // was set by the compaction process.
-    for (int i = 0; i < 100001; i++) {
-      char key[100];
-      snprintf(key, sizeof(key), "B%010d", i);
-      std::string newvalue = Get(1, key);
-      ASSERT_EQ(newvalue.compare(NEW_VALUE), 0);
-    }
-  } while (ChangeCompactOptions());
+  // push all files to lower levels. This should
+  // invoke the compaction filter for all 100000 keys.
+  ASSERT_OK(Flush(1));
+  if (option_config_ != kUniversalCompactionMultiLevel &&
+      option_config_ != kUniversalSubcompactions) {
+    dbfull()->TEST_CompactRange(0, nullptr, nullptr, handles_[1]);
+    dbfull()->TEST_CompactRange(1, nullptr, nullptr, handles_[1]);
+  } else {
+    dbfull()->CompactRange(CompactRangeOptions(), handles_[1], nullptr,
+                           nullptr);
+  }
+
+  // verify that all keys now have the new value that
+  // was set by the compaction process.
+  for (int i = 0; i < 100001; i++) {
+    char key[100];
+    snprintf(key, sizeof(key), "B%010d", i);
+    std::string newvalue = Get(1, key);
+    ASSERT_EQ(newvalue.compare(NEW_VALUE), 0);
+  }
 }
 
 TEST_F(DBTestCompactionFilter, CompactionFilterWithMergeOperator) {

db/db_iterator_test.cc
@@ -2023,12 +2023,13 @@ TEST_F(DBIteratorTest, TableFilter) {
   // Ensure the table_filter callback is called once for each table.
   {
-    std::set<uint64_t> unseen {1, 2, 3};
+    std::set<uint64_t> unseen{1, 2, 3};
     ReadOptions opts;
     opts.table_filter = [&](const TableProperties& props) {
       auto it = unseen.find(props.num_entries);
       if (it == unseen.end()) {
-        ADD_FAILURE() << "saw table properties with an unexpected " << props.num_entries << " entries";
+        ADD_FAILURE() << "saw table properties with an unexpected "
+                      << props.num_entries << " entries";
       } else {
         unseen.erase(it);
       }
