Fix TSAN: avoid arena mode with range deletions

Summary:
The range deletion meta-block iterators weren't being cleaned up properly because they don't support arena allocation. I didn't implement arena support because, in the general case, each iterator is used only once and independently of all other iterators, so there should be no data-locality benefit.

Instead, this diff fixes #2370 by treating range deletion iterators as non-arena-allocated.
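
For illustration, here is a minimal, self-contained sketch of the ownership pattern this diff moves to (toy classes, not RocksDB's actual API): each range tombstone iterator is heap-allocated, null results are skipped, and the survivors go to a merging iterator that deletes its children on destruction, with the merger itself held by a std::unique_ptr rather than living in an arena behind a ScopedArenaIterator.

```cpp
#include <memory>
#include <vector>

// Toy stand-ins for InternalIterator and the merging iterator; the real
// RocksDB classes have much richer interfaces.
struct ToyIterator {
  virtual ~ToyIterator() = default;
};

// A merging iterator that owns its heap-allocated children and deletes them
// when it is destroyed, which is the behavior the change relies on once the
// range deletion iterators are no longer arena-allocated.
struct ToyMergingIterator : ToyIterator {
  ToyMergingIterator(ToyIterator** children, int n) {
    for (int i = 0; i < n; ++i) children_.push_back(children[i]);
  }
  ~ToyMergingIterator() override {
    for (ToyIterator* child : children_) delete child;
  }
  std::vector<ToyIterator*> children_;
};

// May return nullptr, mirroring the null checks the diff adds after calling
// NewRangeTombstoneIterator (assumption: a source with no range tombstones
// returns no iterator).
ToyIterator* MaybeNewRangeTombstoneIterator(bool has_tombstones) {
  return has_tombstones ? new ToyIterator() : nullptr;
}

int main() {
  std::vector<ToyIterator*> range_del_iters;
  for (bool has : {true, false, true}) {
    ToyIterator* iter = MaybeNewRangeTombstoneIterator(has);
    if (iter != nullptr) {
      range_del_iters.push_back(iter);  // only collect real iterators
    }
  }
  // The unique_ptr destroys the merger, which in turn deletes every child:
  // no arena, no ScopedArenaIterator, no leaked meta-block iterators.
  std::unique_ptr<ToyIterator> merged(new ToyMergingIterator(
      range_del_iters.empty() ? nullptr : &range_del_iters[0],
      static_cast<int>(range_del_iters.size())));
  return 0;
}
```
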
Closes https://github.com/facebook/rocksdb/pull/2399

Differential Revision: D5171119

Pulled By: ajkr

fbshipit-source-id: bef6f5c4c5905a124f4993945aed4bd86e2807d8
Author:    Andrew Kryczka
Committer: Facebook Github Bot
Commit:    215076ef06
Parent:    3a8a848a55
Changed files:
  db/external_sst_file_ingestion_job.cc | 32
  db/memtable_list.cc                   |  8
  db/memtable_list.h                    |  5
  db/version_set.cc                     | 13
  db/version_set.h                      |  7

db/external_sst_file_ingestion_job.cc
@@ -381,13 +381,16 @@ Status ExternalSstFileIngestionJob::IngestedFilesOverlapWithMemtables(
   sv->imm->AddIterators(ro, &merge_iter_builder);
   ScopedArenaIterator memtable_iter(merge_iter_builder.Finish());
-  MergeIteratorBuilder merge_range_del_iter_builder(
-      &cfd_->internal_comparator(), &arena);
-  merge_range_del_iter_builder.AddIterator(
-      sv->mem->NewRangeTombstoneIterator(ro));
-  sv->imm->AddRangeTombstoneIterators(ro, &merge_range_del_iter_builder);
-  ScopedArenaIterator memtable_range_del_iter(
-      merge_range_del_iter_builder.Finish());
+  std::vector<InternalIterator*> memtable_range_del_iters;
+  auto* active_range_del_iter = sv->mem->NewRangeTombstoneIterator(ro);
+  if (active_range_del_iter != nullptr) {
+    memtable_range_del_iters.push_back(active_range_del_iter);
+  }
+  sv->imm->AddRangeTombstoneIterators(ro, &memtable_range_del_iters);
+  std::unique_ptr<InternalIterator> memtable_range_del_iter(NewMergingIterator(
+      &cfd_->internal_comparator(),
+      memtable_range_del_iters.empty() ? nullptr : &memtable_range_del_iters[0],
+      static_cast<int>(memtable_range_del_iters.size())));
   Status status;
   *overlap = false;
@@ -632,15 +635,18 @@ Status ExternalSstFileIngestionJob::IngestedFileOverlapWithLevel(
   ro.total_order_seek = true;
   MergeIteratorBuilder merge_iter_builder(&cfd_->internal_comparator(),
                                           &arena);
-  MergeIteratorBuilder merge_range_del_iter_builder(
-      &cfd_->internal_comparator(), &arena);
   sv->current->AddIteratorsForLevel(ro, env_options_, &merge_iter_builder, lvl,
                                     nullptr /* range_del_agg */);
-  sv->current->AddRangeDelIteratorsForLevel(ro, env_options_,
-                                            &merge_range_del_iter_builder, lvl);
   ScopedArenaIterator level_iter(merge_iter_builder.Finish());
-  ScopedArenaIterator level_range_del_iter(
-      merge_range_del_iter_builder.Finish());
+  std::vector<InternalIterator*> level_range_del_iters;
+  sv->current->AddRangeDelIteratorsForLevel(ro, env_options_, lvl,
+                                            &level_range_del_iters);
+  std::unique_ptr<InternalIterator> level_range_del_iter(NewMergingIterator(
+      &cfd_->internal_comparator(),
+      level_range_del_iters.empty() ? nullptr : &level_range_del_iters[0],
+      static_cast<int>(level_range_del_iters.size())));
   Status status = IngestedFileOverlapWithIteratorRange(
       file_to_ingest, level_iter.get(), overlap_with_level);
   if (status.ok() && *overlap_with_level == false) {
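
One detail worth noting in the two hunks above is the `empty() ? nullptr : &vec[0]` guard: indexing an empty std::vector, even just to take an element's address, is undefined behavior, so the vector must be checked before a first-element pointer and a count are handed to NewMergingIterator. A tiny self-contained illustration of the guard (hypothetical names, not RocksDB code):

```cpp
#include <cassert>
#include <vector>

struct Iter {};  // stand-in for an internal iterator type

// Mirrors the (first-child pointer, count) calling convention from the diff.
Iter** ChildArray(std::vector<Iter*>& children) {
  // children[0] on an empty vector is undefined behavior, hence the guard;
  // children.data() would be an equivalent C++11 alternative.
  return children.empty() ? nullptr : &children[0];
}

int main() {
  std::vector<Iter*> none;
  assert(ChildArray(none) == nullptr);

  Iter a, b;
  std::vector<Iter*> two = {&a, &b};
  assert(ChildArray(two) == &two[0]);
  return 0;
}
```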

db/memtable_list.cc
@@ -169,9 +169,13 @@ Status MemTableListVersion::AddRangeTombstoneIterators(
 }
 
 Status MemTableListVersion::AddRangeTombstoneIterators(
-    const ReadOptions& read_opts, MergeIteratorBuilder* merge_iter_builder) {
+    const ReadOptions& read_opts,
+    std::vector<InternalIterator*>* range_del_iters) {
   for (auto& m : memlist_) {
-    merge_iter_builder->AddIterator(m->NewRangeTombstoneIterator(read_opts));
+    auto* range_del_iter = m->NewRangeTombstoneIterator(read_opts);
+    if (range_del_iter != nullptr) {
+      range_del_iters->push_back(range_del_iter);
+    }
   }
   return Status::OK();
 }

db/memtable_list.h
@@ -84,8 +84,9 @@ class MemTableListVersion {
   Status AddRangeTombstoneIterators(const ReadOptions& read_opts, Arena* arena,
                                     RangeDelAggregator* range_del_agg);
-  Status AddRangeTombstoneIterators(const ReadOptions& read_opts,
-                                    MergeIteratorBuilder* merge_iter_builder);
+  Status AddRangeTombstoneIterators(
+      const ReadOptions& read_opts,
+      std::vector<InternalIterator*>* range_del_iters);
 
   void AddIterators(const ReadOptions& options,
                     std::vector<InternalIterator*>* iterator_list,

db/version_set.cc
@@ -866,15 +866,18 @@ void Version::AddIteratorsForLevel(const ReadOptions& read_options,
 }
 
 void Version::AddRangeDelIteratorsForLevel(
-    const ReadOptions& read_options, const EnvOptions& soptions,
-    MergeIteratorBuilder* merge_iter_builder, int level) {
+    const ReadOptions& read_options, const EnvOptions& soptions, int level,
+    std::vector<InternalIterator*>* range_del_iters) {
+  range_del_iters->clear();
   for (size_t i = 0; i < storage_info_.LevelFilesBrief(level).num_files; i++) {
     const auto& file = storage_info_.LevelFilesBrief(level).files[i];
-    merge_iter_builder->AddIterator(
-        cfd_->table_cache()->NewRangeTombstoneIterator(
+    auto* range_del_iter = cfd_->table_cache()->NewRangeTombstoneIterator(
         read_options, soptions, cfd_->internal_comparator(), file.fd,
         cfd_->internal_stats()->GetFileReadHist(level),
-        false /* skip_filters */, level));
+        false /* skip_filters */, level);
+    if (range_del_iter != nullptr) {
+      range_del_iters->push_back(range_del_iter);
+    }
   }
 }

db/version_set.h
@@ -463,10 +463,9 @@ class Version {
                             MergeIteratorBuilder* merger_iter_builder,
                             int level, RangeDelAggregator* range_del_agg);
-  void AddRangeDelIteratorsForLevel(const ReadOptions& read_options,
-                                    const EnvOptions& soptions,
-                                    MergeIteratorBuilder* merge_iter_builder,
-                                    int level);
+  void AddRangeDelIteratorsForLevel(
+      const ReadOptions& read_options, const EnvOptions& soptions, int level,
+      std::vector<InternalIterator*>* range_del_iters);
 
   // Lookup the value for key. If found, store it in *val and
   // return OK. Else return a non-OK status.
