WriteBufferManager doesn't cost to cache if no limit is set (#4695)

Summary:
WriteBufferManager is not invoked when allocating memory for a memtable if the limit is not set, even if a cache is passed in. This is inconsistent with what the comment says. Fix it.
Pull Request resolved: https://github.com/facebook/rocksdb/pull/4695

Differential Revision: D13112722

Pulled By: siying

fbshipit-source-id: 0b27eef63867f679cd06033ea56907c0569597f4
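
For context, a minimal sketch (not part of this patch) of the configuration the summary describes: a WriteBufferManager constructed with buffer_size == 0 (no write-buffer limit) but with a block cache, which should still charge ("cost") memtable memory to that cache. The DB path and cache size below are illustrative only.

#include <memory>

#include <rocksdb/cache.h>
#include <rocksdb/db.h>
#include <rocksdb/options.h>
#include <rocksdb/write_buffer_manager.h>

int main() {
  rocksdb::Options options;
  options.create_if_missing = true;

  // 64MB LRU cache; with buffer_size == 0 the manager enforces no limit but,
  // after this fix, still charges memtable allocations to this cache.
  std::shared_ptr<rocksdb::Cache> cache = rocksdb::NewLRUCache(64 << 20);
  options.write_buffer_manager =
      std::make_shared<rocksdb::WriteBufferManager>(0, cache);

  rocksdb::DB* db = nullptr;
  // Path is illustrative.
  rocksdb::Status s = rocksdb::DB::Open(options, "/tmp/wbm_cache_example", &db);
  if (s.ok()) {
    db->Put(rocksdb::WriteOptions(), "foo", "bar");
    // Memtable arena blocks are now reflected in cache->GetUsage().
    delete db;
  }
  return 0;
}
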
Branch: main
Authored by Siying Dong 6 years ago, committed by Facebook Github Bot
parent 9d6d4867ab
commit 13579e8c5a
Changed files:
  1. db/db_test2.cc (16)
  2. db/memtable.cc (13)
  3. include/rocksdb/write_buffer_manager.h (2)
  4. memtable/alloc_tracker.cc (9)

@@ -454,6 +454,22 @@ TEST_F(DBTest2, SharedWriteBufferLimitAcrossDB) {
   rocksdb::SyncPoint::GetInstance()->DisableProcessing();
 }
 
+TEST_F(DBTest2, TestWriteBufferNoLimitWithCache) {
+  Options options = CurrentOptions();
+  options.arena_block_size = 4096;
+  std::shared_ptr<Cache> cache =
+      NewLRUCache(LRUCacheOptions(10000000, 1, false, 0.0));
+  options.write_buffer_size = 50000;  // this is never hit
+  // Use a write buffer total size so that the soft limit is about
+  // 105000.
+  options.write_buffer_manager.reset(new WriteBufferManager(0, cache));
+  Reopen(options);
+
+  ASSERT_OK(Put("foo", "bar"));
+  // One dummy entry is 1MB.
+  ASSERT_GT(cache->GetUsage(), 500000);
+}
+
 namespace {
 void ValidateKeyExistence(DB* db, const std::vector<Slice>& keys_must_exist,
                           const std::vector<Slice>& keys_must_not_exist) {

@@ -69,12 +69,13 @@ MemTable::MemTable(const InternalKeyComparator& cmp,
       refs_(0),
       kArenaBlockSize(OptimizeBlockSize(moptions_.arena_block_size)),
       mem_tracker_(write_buffer_manager),
-      arena_(
-          moptions_.arena_block_size,
-          (write_buffer_manager != nullptr && write_buffer_manager->enabled())
-              ? &mem_tracker_
-              : nullptr,
-          mutable_cf_options.memtable_huge_page_size),
+      arena_(moptions_.arena_block_size,
+             (write_buffer_manager != nullptr &&
+              (write_buffer_manager->enabled() ||
+               write_buffer_manager->cost_to_cache()))
+                 ? &mem_tracker_
+                 : nullptr,
+             mutable_cf_options.memtable_huge_page_size),
       table_(ioptions.memtable_factory->CreateMemTableRep(
           comparator_, &arena_, mutable_cf_options.prefix_extractor.get(),
           ioptions.info_log, column_family_id)),
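
The new ternary condition above can be read as the following predicate; this helper is hypothetical (not in the patch) and only restates the change for clarity: the memtable arena is given an AllocTracker when the manager exists and either enforces a limit or charges allocations to a cache.

#include <rocksdb/write_buffer_manager.h>

// Hypothetical helper, equivalent to the condition in the MemTable
// constructor after this change.
static bool ShouldTrackAllocations(const rocksdb::WriteBufferManager* wbm) {
  return wbm != nullptr && (wbm->enabled() || wbm->cost_to_cache());
}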

@@ -30,6 +30,8 @@ class WriteBufferManager {
 
   bool enabled() const { return buffer_size_ != 0; }
 
+  bool cost_to_cache() const { return cache_rep_ != nullptr; }
+
   // Only valid if enabled()
   size_t memory_usage() const {
     return memory_used_.load(std::memory_order_relaxed);
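
An illustrative check, assuming the WriteBufferManager(buffer_size, cache) constructor used in the test above, of what the two predicates report for each way of constructing a manager:

#include <cassert>
#include <memory>

#include <rocksdb/cache.h>
#include <rocksdb/write_buffer_manager.h>

int main() {
  std::shared_ptr<rocksdb::Cache> cache = rocksdb::NewLRUCache(8 << 20);

  rocksdb::WriteBufferManager limit_only(64 << 20);              // limit, no cache
  rocksdb::WriteBufferManager cache_only(0, cache);              // no limit, cache
  rocksdb::WriteBufferManager limit_and_cache(64 << 20, cache);  // both

  assert(limit_only.enabled() && !limit_only.cost_to_cache());
  assert(!cache_only.enabled() && cache_only.cost_to_cache());
  assert(limit_and_cache.enabled() && limit_and_cache.cost_to_cache());
  return 0;
}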

@@ -24,7 +24,8 @@ AllocTracker::~AllocTracker() { FreeMem(); }
 
 void AllocTracker::Allocate(size_t bytes) {
   assert(write_buffer_manager_ != nullptr);
-  if (write_buffer_manager_->enabled()) {
+  if (write_buffer_manager_->enabled() ||
+      write_buffer_manager_->cost_to_cache()) {
     bytes_allocated_.fetch_add(bytes, std::memory_order_relaxed);
     write_buffer_manager_->ReserveMem(bytes);
   }
@@ -32,7 +33,8 @@ void AllocTracker::Allocate(size_t bytes) {
 
 void AllocTracker::DoneAllocating() {
   if (write_buffer_manager_ != nullptr && !done_allocating_) {
-    if (write_buffer_manager_->enabled()) {
+    if (write_buffer_manager_->enabled() ||
+        write_buffer_manager_->cost_to_cache()) {
       write_buffer_manager_->ScheduleFreeMem(
           bytes_allocated_.load(std::memory_order_relaxed));
     } else {
@@ -47,7 +49,8 @@ void AllocTracker::FreeMem() {
     DoneAllocating();
   }
   if (write_buffer_manager_ != nullptr && !freed_) {
-    if (write_buffer_manager_->enabled()) {
+    if (write_buffer_manager_->enabled() ||
+        write_buffer_manager_->cost_to_cache()) {
       write_buffer_manager_->FreeMem(
           bytes_allocated_.load(std::memory_order_relaxed));
     } else {
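
All three alloc_tracker.cc call sites use the same condition so the accounting stays symmetric: anything reserved against the manager is later scheduled for release and then freed, now also when the manager is cache-only. A minimal sketch of the corresponding manager-side calls (sizes arbitrary, using only the public WriteBufferManager interface):

#include <memory>

#include <rocksdb/cache.h>
#include <rocksdb/write_buffer_manager.h>

int main() {
  auto cache = rocksdb::NewLRUCache(10 << 20);
  rocksdb::WriteBufferManager wbm(0 /* no limit */, cache);

  // What AllocTracker now does for a cache-only manager as well:
  wbm.ReserveMem(1 << 20);       // Allocate(): reserve against the cache
  wbm.ScheduleFreeMem(1 << 20);  // DoneAllocating(): mark for release
  wbm.FreeMem(1 << 20);          // FreeMem(): actually release the reservation
  return 0;
}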
