Move delete and free outside of critical section

Summary: Split Unref into two parts: a cheap part that only decrements the reference count, and an expensive part that calls the deleter and frees the entry. Where possible, call the expensive part outside of the critical section to decrease lock contention (see the sketch below).

Test Plan: unittests

Reviewers: dhruba, haobo

Reviewed By: dhruba

CC: leveldb, kailiu

Differential Revision: https://reviews.facebook.net/D13299
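
As context for the diff below, here is a minimal, self-contained sketch of the pattern this change applies in Release(), Insert(), and Erase(). It is not the RocksDB code itself; the Entry struct, the CacheShardSketch class, the ReleaseAll() name, and the std::mutex usage are simplified stand-ins for LRUHandle, LRUCache, and the real interfaces. The cheap work (decrementing the reference count) happens under the lock; the expensive work (deleter plus free) runs after the lock is released.

#include <list>
#include <mutex>

// Hypothetical, simplified entry type for illustration only.
struct Entry {
  int refs;
  void (*deleter)(Entry* e);
};

class CacheShardSketch {
 public:
  // Drop one reference on each handle. The mutex protects only the
  // cheap ref-count updates; the deleter/free calls run after the
  // lock is released, so other threads are not blocked while they run.
  void ReleaseAll(const std::list<Entry*>& handles) {
    std::list<Entry*> last_reference_list;
    {
      std::lock_guard<std::mutex> l(mutex_);
      for (Entry* e : handles) {
        if (--e->refs == 0) {
          last_reference_list.push_back(e);
        }
      }
    }
    // No other thread can still hold a reference to these entries,
    // so destroying them without the mutex held is safe.
    for (Entry* e : last_reference_list) {
      (*e->deleter)(e);
      delete e;
    }
  }

 private:
  std::mutex mutex_;
};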
Branch: main
Author: Igor Canadi (11 years ago)
Parent: 1a8c1b0817
Commit: fa46ddb41f
1 changed file: util/cache.cc (106 additions and deletions)

@@ -5,6 +5,7 @@
#include <assert.h>
#include <stdio.h>
#include <stdlib.h>
#include <list>
#include "rocksdb/cache.h"
#include "port/port.h"
@@ -151,7 +152,11 @@ class LRUCache {
private:
void LRU_Remove(LRUHandle* e);
void LRU_Append(LRUHandle* e);
void Unref(LRUHandle* e);
// Just reduce the reference count by 1.
// Return true if last reference
bool Unref(LRUHandle* e);
// Call deleter and free
void FreeEntry(LRUHandle* e);
// Initialized before use.
size_t capacity_;
@@ -180,19 +185,23 @@ LRUCache::~LRUCache() {
for (LRUHandle* e = lru_.next; e != &lru_; ) {
LRUHandle* next = e->next;
assert(e->refs == 1); // Error if caller has an unreleased handle
Unref(e);
if (Unref(e)) {
FreeEntry(e);
}
e = next;
}
}
void LRUCache::Unref(LRUHandle* e) {
bool LRUCache::Unref(LRUHandle* e) {
assert(e->refs > 0);
e->refs--;
if (e->refs <= 0) {
return e->refs == 0;
}
(*e->deleter)(e->key(), e->value);
free(e);
}
void LRUCache::FreeEntry(LRUHandle* e) {
assert(e->refs == 0);
(*e->deleter)(e->key(), e->value);
free(e);
}
void LRUCache::LRU_Remove(LRUHandle* e) {
@@ -222,48 +231,79 @@ Cache::Handle* LRUCache::Lookup(const Slice& key, uint32_t hash) {
}
void LRUCache::Release(Cache::Handle* handle) {
MutexLock l(&mutex_);
Unref(reinterpret_cast<LRUHandle*>(handle));
LRUHandle* e = reinterpret_cast<LRUHandle*>(handle);
bool last_reference = false;
{
MutexLock l(&mutex_);
last_reference = Unref(e);
}
if (last_reference) {
FreeEntry(e);
}
}
Cache::Handle* LRUCache::Insert(
const Slice& key, uint32_t hash, void* value, size_t charge,
void (*deleter)(const Slice& key, void* value)) {
MutexLock l(&mutex_);
LRUHandle* e = reinterpret_cast<LRUHandle*>(
malloc(sizeof(LRUHandle)-1 + key.size()));
e->value = value;
e->deleter = deleter;
e->charge = charge;
e->key_length = key.size();
e->hash = hash;
e->refs = 2; // One from LRUCache, one for the returned handle
memcpy(e->key_data, key.data(), key.size());
LRU_Append(e);
LRUHandle* old = table_.Insert(e);
if (old != nullptr) {
LRU_Remove(old);
Unref(old);
std::list<LRUHandle*> last_reference_list;
{
MutexLock l(&mutex_);
e->value = value;
e->deleter = deleter;
e->charge = charge;
e->key_length = key.size();
e->hash = hash;
e->refs = 2; // One from LRUCache, one for the returned handle
memcpy(e->key_data, key.data(), key.size());
LRU_Append(e);
LRUHandle* old = table_.Insert(e);
if (old != nullptr) {
LRU_Remove(old);
if (Unref(old)) {
last_reference_list.push_back(old);
}
}
while (usage_ > capacity_ && lru_.next != &lru_) {
LRUHandle* old = lru_.next;
LRU_Remove(old);
table_.Remove(old->key(), old->hash);
if (Unref(old)) {
last_reference_list.push_back(old);
}
}
}
while (usage_ > capacity_ && lru_.next != &lru_) {
LRUHandle* old = lru_.next;
LRU_Remove(old);
table_.Remove(old->key(), old->hash);
Unref(old);
// we free the entries here outside of mutex for
// performance reasons
for (auto entry : last_reference_list) {
FreeEntry(entry);
}
return reinterpret_cast<Cache::Handle*>(e);
}
void LRUCache::Erase(const Slice& key, uint32_t hash) {
MutexLock l(&mutex_);
LRUHandle* e = table_.Remove(key, hash);
if (e != nullptr) {
LRU_Remove(e);
Unref(e);
LRUHandle* e;
bool last_reference = false;
{
MutexLock l(&mutex_);
e = table_.Remove(key, hash);
if (e != nullptr) {
LRU_Remove(e);
last_reference = Unref(e);
}
}
// mutex not held here
// last_reference will only be true if e != nullptr
if (last_reference) {
FreeEntry(e);
}
}
