Run clang-format against files under examples/, memory/ and memtable/ folders (#10893)

Summary:
**Context/Summary:**
Run the following to format
```
find ./examples -iname "*.h" -o -iname "*.cc" | xargs clang-format -i
find ./memory -iname "*.h" -o -iname "*.cc" | xargs clang-format -i
find ./memtable -iname "*.h" -o -iname "*.cc" | xargs clang-format -i
```

**Test**
- Manual inspection to ensure changes are cosmetic only
- CI

Pull Request resolved: https://github.com/facebook/rocksdb/pull/10893

Reviewed By: jay-zhuang

Differential Revision: D40779187

Pulled By: hx235

fbshipit-source-id: 529cbb0f0fbd698d95817e8c42fe3ce32254d9b0
main
Hui Xiao 2 years ago committed by Facebook GitHub Bot
parent 7867a1112b
commit 08a63ad10b
  1. 6
      examples/column_families_example.cc
  2. 60
      examples/compact_files_example.cc
  3. 2
      examples/optimistic_transaction_example.cc
  4. 2
      examples/simple_example.cc
  5. 1
      memory/allocator.h
  6. 2
      memory/arena_test.cc
  7. 2
      memory/concurrent_arena.cc
  8. 1
      memory/concurrent_arena.h
  9. 1
      memtable/alloc_tracker.cc
  10. 37
      memtable/hash_linklist_rep.cc
  11. 28
      memtable/hash_skiplist_rep.cc
  12. 21
      memtable/inlineskiplist.h
  13. 6
      memtable/inlineskiplist_test.cc
  14. 4
      memtable/memtablerep_bench.cc
  15. 58
      memtable/skiplist.h
  16. 10
      memtable/skiplist_test.cc
  17. 143
      memtable/skiplistrep.cc
  18. 2
      memtable/stl_wrappers.h
  19. 44
      memtable/vectorrep.cc
  20. 1
      memtable/write_buffer_manager_test.cc

@ -7,8 +7,8 @@
#include <vector> #include <vector>
#include "rocksdb/db.h" #include "rocksdb/db.h"
#include "rocksdb/slice.h"
#include "rocksdb/options.h" #include "rocksdb/options.h"
#include "rocksdb/slice.h"
#if defined(OS_WIN) #if defined(OS_WIN)
std::string kDBPath = "C:\\Windows\\TEMP\\rocksdb_column_families_example"; std::string kDBPath = "C:\\Windows\\TEMP\\rocksdb_column_families_example";
@ -52,8 +52,8 @@ int main() {
column_families.push_back(ColumnFamilyDescriptor( column_families.push_back(ColumnFamilyDescriptor(
ROCKSDB_NAMESPACE::kDefaultColumnFamilyName, ColumnFamilyOptions())); ROCKSDB_NAMESPACE::kDefaultColumnFamilyName, ColumnFamilyOptions()));
// open the new one, too // open the new one, too
column_families.push_back(ColumnFamilyDescriptor( column_families.push_back(
"new_cf", ColumnFamilyOptions())); ColumnFamilyDescriptor("new_cf", ColumnFamilyOptions()));
std::vector<ColumnFamilyHandle*> handles; std::vector<ColumnFamilyHandle*> handles;
s = DB::Open(DBOptions(), kDBPath, column_families, &handles, &db); s = DB::Open(DBOptions(), kDBPath, column_families, &handles, &db);
assert(s.ok()); assert(s.ok());

@ -8,6 +8,7 @@
#include <mutex> #include <mutex>
#include <string> #include <string>
#include "rocksdb/db.h" #include "rocksdb/db.h"
#include "rocksdb/env.h" #include "rocksdb/env.h"
#include "rocksdb/options.h" #include "rocksdb/options.h"
@ -39,29 +40,27 @@ class Compactor : public EventListener {
// and column family. It is the caller's responsibility to // and column family. It is the caller's responsibility to
// destroy the returned CompactionTask. Returns "nullptr" // destroy the returned CompactionTask. Returns "nullptr"
// if it cannot find a proper compaction task. // if it cannot find a proper compaction task.
virtual CompactionTask* PickCompaction( virtual CompactionTask* PickCompaction(DB* db,
DB* db, const std::string& cf_name) = 0; const std::string& cf_name) = 0;
// Schedule and run the specified compaction task in background. // Schedule and run the specified compaction task in background.
virtual void ScheduleCompaction(CompactionTask *task) = 0; virtual void ScheduleCompaction(CompactionTask* task) = 0;
}; };
// Example structure that describes a compaction task. // Example structure that describes a compaction task.
struct CompactionTask { struct CompactionTask {
CompactionTask( CompactionTask(DB* _db, Compactor* _compactor,
DB* _db, Compactor* _compactor, const std::string& _column_family_name,
const std::string& _column_family_name, const std::vector<std::string>& _input_file_names,
const std::vector<std::string>& _input_file_names, const int _output_level,
const int _output_level, const CompactionOptions& _compact_options, bool _retry_on_fail)
const CompactionOptions& _compact_options, : db(_db),
bool _retry_on_fail) compactor(_compactor),
: db(_db), column_family_name(_column_family_name),
compactor(_compactor), input_file_names(_input_file_names),
column_family_name(_column_family_name), output_level(_output_level),
input_file_names(_input_file_names), compact_options(_compact_options),
output_level(_output_level), retry_on_fail(_retry_on_fail) {}
compact_options(_compact_options),
retry_on_fail(_retry_on_fail) {}
DB* db; DB* db;
Compactor* compactor; Compactor* compactor;
const std::string& column_family_name; const std::string& column_family_name;
@ -77,15 +76,13 @@ class FullCompactor : public Compactor {
public: public:
explicit FullCompactor(const Options options) : options_(options) { explicit FullCompactor(const Options options) : options_(options) {
compact_options_.compression = options_.compression; compact_options_.compression = options_.compression;
compact_options_.output_file_size_limit = compact_options_.output_file_size_limit = options_.target_file_size_base;
options_.target_file_size_base;
} }
// When flush happens, it determines whether to trigger compaction. If // When flush happens, it determines whether to trigger compaction. If
// triggered_writes_stop is true, it will also set the retry flag of // triggered_writes_stop is true, it will also set the retry flag of
// compaction-task to true. // compaction-task to true.
void OnFlushCompleted( void OnFlushCompleted(DB* db, const FlushJobInfo& info) override {
DB* db, const FlushJobInfo& info) override {
CompactionTask* task = PickCompaction(db, info.cf_name); CompactionTask* task = PickCompaction(db, info.cf_name);
if (task != nullptr) { if (task != nullptr) {
if (info.triggered_writes_stop) { if (info.triggered_writes_stop) {
@ -97,8 +94,7 @@ class FullCompactor : public Compactor {
} }
// Always pick a compaction which includes all files whenever possible. // Always pick a compaction which includes all files whenever possible.
CompactionTask* PickCompaction( CompactionTask* PickCompaction(DB* db, const std::string& cf_name) override {
DB* db, const std::string& cf_name) override {
ColumnFamilyMetaData cf_meta; ColumnFamilyMetaData cf_meta;
db->GetColumnFamilyMetaData(&cf_meta); db->GetColumnFamilyMetaData(&cf_meta);
@ -111,9 +107,8 @@ class FullCompactor : public Compactor {
input_file_names.push_back(file.name); input_file_names.push_back(file.name);
} }
} }
return new CompactionTask( return new CompactionTask(db, this, cf_name, input_file_names,
db, this, cf_name, input_file_names, options_.num_levels - 1, compact_options_, false);
options_.num_levels - 1, compact_options_, false);
} }
// Schedule the specified compaction task in background. // Schedule the specified compaction task in background.
@ -127,16 +122,14 @@ class FullCompactor : public Compactor {
assert(task); assert(task);
assert(task->db); assert(task->db);
Status s = task->db->CompactFiles( Status s = task->db->CompactFiles(
task->compact_options, task->compact_options, task->input_file_names, task->output_level);
task->input_file_names,
task->output_level);
printf("CompactFiles() finished with status %s\n", s.ToString().c_str()); printf("CompactFiles() finished with status %s\n", s.ToString().c_str());
if (!s.ok() && !s.IsIOError() && task->retry_on_fail) { if (!s.ok() && !s.IsIOError() && task->retry_on_fail) {
// If a compaction task with its retry_on_fail=true failed, // If a compaction task with its retry_on_fail=true failed,
// try to schedule another compaction in case the reason // try to schedule another compaction in case the reason
// is not an IO error. // is not an IO error.
CompactionTask* new_task = task->compactor->PickCompaction( CompactionTask* new_task =
task->db, task->column_family_name); task->compactor->PickCompaction(task->db, task->column_family_name);
task->compactor->ScheduleCompaction(new_task); task->compactor->ScheduleCompaction(new_task);
} }
} }
@ -167,14 +160,13 @@ int main() {
// because of options.level0_stop_writes_trigger // because of options.level0_stop_writes_trigger
for (int i = 1000; i < 99999; ++i) { for (int i = 1000; i < 99999; ++i) {
db->Put(WriteOptions(), std::to_string(i), db->Put(WriteOptions(), std::to_string(i),
std::string(500, 'a' + (i % 26))); std::string(500, 'a' + (i % 26)));
} }
// verify the values are still there // verify the values are still there
std::string value; std::string value;
for (int i = 1000; i < 99999; ++i) { for (int i = 1000; i < 99999; ++i) {
db->Get(ReadOptions(), std::to_string(i), db->Get(ReadOptions(), std::to_string(i), &value);
&value);
assert(value == std::string(500, 'a' + (i % 26))); assert(value == std::string(500, 'a' + (i % 26)));
} }

@ -8,8 +8,8 @@
#include "rocksdb/db.h" #include "rocksdb/db.h"
#include "rocksdb/options.h" #include "rocksdb/options.h"
#include "rocksdb/slice.h" #include "rocksdb/slice.h"
#include "rocksdb/utilities/transaction.h"
#include "rocksdb/utilities/optimistic_transaction_db.h" #include "rocksdb/utilities/optimistic_transaction_db.h"
#include "rocksdb/utilities/transaction.h"
using ROCKSDB_NAMESPACE::DB; using ROCKSDB_NAMESPACE::DB;
using ROCKSDB_NAMESPACE::OptimisticTransactionDB; using ROCKSDB_NAMESPACE::OptimisticTransactionDB;

@ -7,8 +7,8 @@
#include <string> #include <string>
#include "rocksdb/db.h" #include "rocksdb/db.h"
#include "rocksdb/slice.h"
#include "rocksdb/options.h" #include "rocksdb/options.h"
#include "rocksdb/slice.h"
using ROCKSDB_NAMESPACE::DB; using ROCKSDB_NAMESPACE::DB;
using ROCKSDB_NAMESPACE::Options; using ROCKSDB_NAMESPACE::Options;

@ -13,6 +13,7 @@
#pragma once #pragma once
#include <cerrno> #include <cerrno>
#include <cstddef> #include <cstddef>
#include "rocksdb/write_buffer_manager.h" #include "rocksdb/write_buffer_manager.h"
namespace ROCKSDB_NAMESPACE { namespace ROCKSDB_NAMESPACE {

@ -36,7 +36,7 @@ bool CheckMemoryAllocated(size_t allocated, size_t expected) {
void MemoryAllocatedBytesTest(size_t huge_page_size) { void MemoryAllocatedBytesTest(size_t huge_page_size) {
const int N = 17; const int N = 17;
size_t req_sz; // requested size size_t req_sz; // requested size
size_t bsz = 32 * 1024; // block size size_t bsz = 32 * 1024; // block size
size_t expected_memory_allocated; size_t expected_memory_allocated;

@ -8,7 +8,9 @@
// found in the LICENSE file. See the AUTHORS file for names of contributors. // found in the LICENSE file. See the AUTHORS file for names of contributors.
#include "memory/concurrent_arena.h" #include "memory/concurrent_arena.h"
#include <thread> #include <thread>
#include "port/port.h" #include "port/port.h"
#include "util/random.h" #include "util/random.h"

@ -11,6 +11,7 @@
#include <atomic> #include <atomic>
#include <memory> #include <memory>
#include <utility> #include <utility>
#include "memory/allocator.h" #include "memory/allocator.h"
#include "memory/arena.h" #include "memory/arena.h"
#include "port/lang.h" #include "port/lang.h"

@ -8,6 +8,7 @@
// found in the LICENSE file. See the AUTHORS file for names of contributors. // found in the LICENSE file. See the AUTHORS file for names of contributors.
#include <assert.h> #include <assert.h>
#include "memory/allocator.h" #include "memory/allocator.h"
#include "memory/arena.h" #include "memory/arena.h"
#include "rocksdb/write_buffer_manager.h" #include "rocksdb/write_buffer_manager.h"

@ -77,9 +77,7 @@ struct Node {
next_.store(x, std::memory_order_release); next_.store(x, std::memory_order_release);
} }
// No-barrier variants that can be safely used in a few locations. // No-barrier variants that can be safely used in a few locations.
Node* NoBarrier_Next() { Node* NoBarrier_Next() { return next_.load(std::memory_order_relaxed); }
return next_.load(std::memory_order_relaxed);
}
void NoBarrier_SetNext(Node* x) { next_.store(x, std::memory_order_relaxed); } void NoBarrier_SetNext(Node* x) { next_.store(x, std::memory_order_relaxed); }
@ -296,9 +294,9 @@ class HashLinkListRep : public MemTableRep {
// Advance to the first entry with a key >= target // Advance to the first entry with a key >= target
void Seek(const Slice& internal_key, const char* memtable_key) override { void Seek(const Slice& internal_key, const char* memtable_key) override {
const char* encoded_key = const char* encoded_key = (memtable_key != nullptr)
(memtable_key != nullptr) ? ? memtable_key
memtable_key : EncodeKey(&tmp_, internal_key); : EncodeKey(&tmp_, internal_key);
iter_.Seek(encoded_key); iter_.Seek(encoded_key);
} }
@ -324,7 +322,7 @@ class HashLinkListRep : public MemTableRep {
// To destruct with the iterator. // To destruct with the iterator.
std::unique_ptr<MemtableSkipList> full_list_; std::unique_ptr<MemtableSkipList> full_list_;
std::unique_ptr<Allocator> allocator_; std::unique_ptr<Allocator> allocator_;
std::string tmp_; // For passing to EncodeKey std::string tmp_; // For passing to EncodeKey
}; };
class LinkListIterator : public MemTableRep::Iterator { class LinkListIterator : public MemTableRep::Iterator {
@ -365,8 +363,8 @@ class HashLinkListRep : public MemTableRep {
// Advance to the first entry with a key >= target // Advance to the first entry with a key >= target
void Seek(const Slice& internal_key, void Seek(const Slice& internal_key,
const char* /*memtable_key*/) override { const char* /*memtable_key*/) override {
node_ = hash_link_list_rep_->FindGreaterOrEqualInBucket(head_, node_ =
internal_key); hash_link_list_rep_->FindGreaterOrEqualInBucket(head_, internal_key);
} }
// Retreat to the last entry with a key <= target // Retreat to the last entry with a key <= target
@ -398,15 +396,14 @@ class HashLinkListRep : public MemTableRep {
head_ = head; head_ = head;
node_ = nullptr; node_ = nullptr;
} }
private: private:
friend class HashLinkListRep; friend class HashLinkListRep;
const HashLinkListRep* const hash_link_list_rep_; const HashLinkListRep* const hash_link_list_rep_;
Node* head_; Node* head_;
Node* node_; Node* node_;
virtual void SeekToHead() { virtual void SeekToHead() { node_ = head_; }
node_ = head_;
}
}; };
class DynamicIterator : public HashLinkListRep::LinkListIterator { class DynamicIterator : public HashLinkListRep::LinkListIterator {
@ -486,7 +483,7 @@ class HashLinkListRep : public MemTableRep {
// This is used when there wasn't a bucket. It is cheaper than // This is used when there wasn't a bucket. It is cheaper than
// instantiating an empty bucket over which to iterate. // instantiating an empty bucket over which to iterate.
public: public:
EmptyIterator() { } EmptyIterator() {}
bool Valid() const override { return false; } bool Valid() const override { return false; }
const char* key() const override { const char* key() const override {
assert(false); assert(false);
@ -521,7 +518,7 @@ HashLinkListRep::HashLinkListRep(
bucket_entries_logging_threshold_(bucket_entries_logging_threshold), bucket_entries_logging_threshold_(bucket_entries_logging_threshold),
if_log_bucket_dist_when_flash_(if_log_bucket_dist_when_flash) { if_log_bucket_dist_when_flash_(if_log_bucket_dist_when_flash) {
char* mem = allocator_->AllocateAligned(sizeof(Pointer) * bucket_size, char* mem = allocator_->AllocateAligned(sizeof(Pointer) * bucket_size,
huge_page_tlb_size, logger); huge_page_tlb_size, logger);
buckets_ = new (mem) Pointer[bucket_size]; buckets_ = new (mem) Pointer[bucket_size];
@ -530,8 +527,7 @@ HashLinkListRep::HashLinkListRep(
} }
} }
HashLinkListRep::~HashLinkListRep() { HashLinkListRep::~HashLinkListRep() {}
}
KeyHandle HashLinkListRep::Allocate(const size_t len, char** buf) { KeyHandle HashLinkListRep::Allocate(const size_t len, char** buf) {
char* mem = allocator_->AllocateAligned(sizeof(Node) + len); char* mem = allocator_->AllocateAligned(sizeof(Node) + len);
@ -633,9 +629,10 @@ void HashLinkListRep::Insert(KeyHandle handle) {
if (bucket_entries_logging_threshold_ > 0 && if (bucket_entries_logging_threshold_ > 0 &&
header->GetNumEntries() == header->GetNumEntries() ==
static_cast<uint32_t>(bucket_entries_logging_threshold_)) { static_cast<uint32_t>(bucket_entries_logging_threshold_)) {
Info(logger_, "HashLinkedList bucket %" ROCKSDB_PRIszt Info(logger_,
" has more than %d " "HashLinkedList bucket %" ROCKSDB_PRIszt
"entries. Key to insert: %s", " has more than %d "
"entries. Key to insert: %s",
GetHash(transformed), header->GetNumEntries(), GetHash(transformed), header->GetNumEntries(),
GetLengthPrefixedSlice(x->key).ToString(true).c_str()); GetLengthPrefixedSlice(x->key).ToString(true).c_str());
} }
@ -786,7 +783,7 @@ MemTableRep::Iterator* HashLinkListRep::GetIterator(Arena* alloc_arena) {
for (itr.SeekToFirst(); itr.Valid(); itr.Next()) { for (itr.SeekToFirst(); itr.Valid(); itr.Next()) {
list->Insert(itr.key()); list->Insert(itr.key());
count++; count++;
} }
} }
} }
if (if_log_bucket_dist_when_flash_) { if (if_log_bucket_dist_when_flash_) {

@ -118,9 +118,9 @@ class HashSkipListRep : public MemTableRep {
// Advance to the first entry with a key >= target // Advance to the first entry with a key >= target
void Seek(const Slice& internal_key, const char* memtable_key) override { void Seek(const Slice& internal_key, const char* memtable_key) override {
if (list_ != nullptr) { if (list_ != nullptr) {
const char* encoded_key = const char* encoded_key = (memtable_key != nullptr)
(memtable_key != nullptr) ? ? memtable_key
memtable_key : EncodeKey(&tmp_, internal_key); : EncodeKey(&tmp_, internal_key);
iter_.Seek(encoded_key); iter_.Seek(encoded_key);
} }
} }
@ -158,6 +158,7 @@ class HashSkipListRep : public MemTableRep {
iter_.SetList(list); iter_.SetList(list);
own_list_ = false; own_list_ = false;
} }
private: private:
// if list_ is nullptr, we should NEVER call any methods on iter_ // if list_ is nullptr, we should NEVER call any methods on iter_
// if list_ is nullptr, this Iterator is not Valid() // if list_ is nullptr, this Iterator is not Valid()
@ -167,14 +168,14 @@ class HashSkipListRep : public MemTableRep {
// responsible for it's cleaning. This is a poor man's std::shared_ptr // responsible for it's cleaning. This is a poor man's std::shared_ptr
bool own_list_; bool own_list_;
std::unique_ptr<Arena> arena_; std::unique_ptr<Arena> arena_;
std::string tmp_; // For passing to EncodeKey std::string tmp_; // For passing to EncodeKey
}; };
class DynamicIterator : public HashSkipListRep::Iterator { class DynamicIterator : public HashSkipListRep::Iterator {
public: public:
explicit DynamicIterator(const HashSkipListRep& memtable_rep) explicit DynamicIterator(const HashSkipListRep& memtable_rep)
: HashSkipListRep::Iterator(nullptr, false), : HashSkipListRep::Iterator(nullptr, false),
memtable_rep_(memtable_rep) {} memtable_rep_(memtable_rep) {}
// Advance to the first entry with a key >= target // Advance to the first entry with a key >= target
void Seek(const Slice& k, const char* memtable_key) override { void Seek(const Slice& k, const char* memtable_key) override {
@ -208,7 +209,7 @@ class HashSkipListRep : public MemTableRep {
// This is used when there wasn't a bucket. It is cheaper than // This is used when there wasn't a bucket. It is cheaper than
// instantiating an empty bucket over which to iterate. // instantiating an empty bucket over which to iterate.
public: public:
EmptyIterator() { } EmptyIterator() {}
bool Valid() const override { return false; } bool Valid() const override { return false; }
const char* key() const override { const char* key() const override {
assert(false); assert(false);
@ -239,8 +240,8 @@ HashSkipListRep::HashSkipListRep(const MemTableRep::KeyComparator& compare,
transform_(transform), transform_(transform),
compare_(compare), compare_(compare),
allocator_(allocator) { allocator_(allocator) {
auto mem = allocator->AllocateAligned( auto mem =
sizeof(std::atomic<void*>) * bucket_size); allocator->AllocateAligned(sizeof(std::atomic<void*>) * bucket_size);
buckets_ = new (mem) std::atomic<Bucket*>[bucket_size]; buckets_ = new (mem) std::atomic<Bucket*>[bucket_size];
for (size_t i = 0; i < bucket_size_; ++i) { for (size_t i = 0; i < bucket_size_; ++i) {
@ -248,8 +249,7 @@ HashSkipListRep::HashSkipListRep(const MemTableRep::KeyComparator& compare,
} }
} }
HashSkipListRep::~HashSkipListRep() { HashSkipListRep::~HashSkipListRep() {}
}
HashSkipListRep::Bucket* HashSkipListRep::GetInitializedBucket( HashSkipListRep::Bucket* HashSkipListRep::GetInitializedBucket(
const Slice& transformed) { const Slice& transformed) {
@ -281,9 +281,7 @@ bool HashSkipListRep::Contains(const char* key) const {
return bucket->Contains(key); return bucket->Contains(key);
} }
size_t HashSkipListRep::ApproximateMemoryUsage() { size_t HashSkipListRep::ApproximateMemoryUsage() { return 0; }
return 0;
}
void HashSkipListRep::Get(const LookupKey& k, void* callback_args, void HashSkipListRep::Get(const LookupKey& k, void* callback_args,
bool (*callback_func)(void* arg, const char* entry)) { bool (*callback_func)(void* arg, const char* entry)) {
@ -388,7 +386,7 @@ MemTableRepFactory* NewHashSkipListRepFactory(
size_t bucket_count, int32_t skiplist_height, size_t bucket_count, int32_t skiplist_height,
int32_t skiplist_branching_factor) { int32_t skiplist_branching_factor) {
return new HashSkipListRepFactory(bucket_count, skiplist_height, return new HashSkipListRepFactory(bucket_count, skiplist_height,
skiplist_branching_factor); skiplist_branching_factor);
} }
} // namespace ROCKSDB_NAMESPACE } // namespace ROCKSDB_NAMESPACE

@ -43,9 +43,11 @@
#pragma once #pragma once
#include <assert.h> #include <assert.h>
#include <stdlib.h> #include <stdlib.h>
#include <algorithm> #include <algorithm>
#include <atomic> #include <atomic>
#include <type_traits> #include <type_traits>
#include "memory/allocator.h" #include "memory/allocator.h"
#include "port/likely.h" #include "port/likely.h"
#include "port/port.h" #include "port/port.h"
@ -62,8 +64,8 @@ class InlineSkipList {
struct Splice; struct Splice;
public: public:
using DecodedKey = \ using DecodedKey =
typename std::remove_reference<Comparator>::type::DecodedType; typename std::remove_reference<Comparator>::type::DecodedType;
static const uint16_t kMaxPossibleHeight = 32; static const uint16_t kMaxPossibleHeight = 32;
@ -264,9 +266,9 @@ class InlineSkipList {
// point to a node that is before the key, and after should point to // point to a node that is before the key, and after should point to
// a node that is after the key. after should be nullptr if a good after // a node that is after the key. after should be nullptr if a good after
// node isn't conveniently available. // node isn't conveniently available.
template<bool prefetch_before> template <bool prefetch_before>
void FindSpliceForLevel(const DecodedKey& key, Node* before, Node* after, int level, void FindSpliceForLevel(const DecodedKey& key, Node* before, Node* after,
Node** out_prev, Node** out_next); int level, Node** out_prev, Node** out_next);
// Recomputes Splice levels from highest_level (inclusive) down to // Recomputes Splice levels from highest_level (inclusive) down to
// lowest_level (inclusive). // lowest_level (inclusive).
@ -766,8 +768,8 @@ void InlineSkipList<Comparator>::FindSpliceForLevel(const DecodedKey& key,
PREFETCH(next->Next(level), 0, 1); PREFETCH(next->Next(level), 0, 1);
} }
if (prefetch_before == true) { if (prefetch_before == true) {
if (next != nullptr && level>0) { if (next != nullptr && level > 0) {
PREFETCH(next->Next(level-1), 0, 1); PREFETCH(next->Next(level - 1), 0, 1);
} }
} }
assert(before == head_ || next == nullptr || assert(before == head_ || next == nullptr ||
@ -791,7 +793,7 @@ void InlineSkipList<Comparator>::RecomputeSpliceLevels(const DecodedKey& key,
assert(recompute_level <= splice->height_); assert(recompute_level <= splice->height_);
for (int i = recompute_level - 1; i >= 0; --i) { for (int i = recompute_level - 1; i >= 0; --i) {
FindSpliceForLevel<true>(key, splice->prev_[i + 1], splice->next_[i + 1], i, FindSpliceForLevel<true>(key, splice->prev_[i + 1], splice->next_[i + 1], i,
&splice->prev_[i], &splice->next_[i]); &splice->prev_[i], &splice->next_[i]);
} }
} }
@ -881,8 +883,7 @@ bool InlineSkipList<Comparator>::Insert(const char* key, Splice* splice,
// we're pessimistic, recompute everything // we're pessimistic, recompute everything
recompute_height = max_height; recompute_height = max_height;
} }
} else if (KeyIsAfterNode(key_decoded, } else if (KeyIsAfterNode(key_decoded, splice->next_[recompute_height])) {
splice->next_[recompute_height])) {
// key is from after splice // key is from after splice
if (allow_partial_splice_fix) { if (allow_partial_splice_fix) {
Node* bad = splice->next_[recompute_height]; Node* bad = splice->next_[recompute_height];

@ -8,8 +8,10 @@
// found in the LICENSE file. See the AUTHORS file for names of contributors. // found in the LICENSE file. See the AUTHORS file for names of contributors.
#include "memtable/inlineskiplist.h" #include "memtable/inlineskiplist.h"
#include <set> #include <set>
#include <unordered_set> #include <unordered_set>
#include "memory/concurrent_arena.h" #include "memory/concurrent_arena.h"
#include "rocksdb/env.h" #include "rocksdb/env.h"
#include "test_util/testharness.h" #include "test_util/testharness.h"
@ -34,9 +36,7 @@ static Key Decode(const char* key) {
struct TestComparator { struct TestComparator {
using DecodedType = Key; using DecodedType = Key;
static DecodedType decode_key(const char* b) { static DecodedType decode_key(const char* b) { return Decode(b); }
return Decode(b);
}
int operator()(const char* a, const char* b) const { int operator()(const char* a, const char* b) const {
if (Decode(a) < Decode(b)) { if (Decode(a) < Decode(b)) {

@ -467,8 +467,8 @@ class FillBenchmark : public Benchmark {
num_write_ops_per_thread_ = FLAGS_num_operations; num_write_ops_per_thread_ = FLAGS_num_operations;
} }
void RunThreads(std::vector<port::Thread>* /*threads*/, uint64_t* bytes_written, void RunThreads(std::vector<port::Thread>* /*threads*/,
uint64_t* bytes_read, bool /*write*/, uint64_t* bytes_written, uint64_t* bytes_read, bool /*write*/,
uint64_t* read_hits) override { uint64_t* read_hits) override {
FillBenchmarkThread(table_, key_gen_, bytes_written, bytes_read, sequence_, FillBenchmarkThread(table_, key_gen_, bytes_written, bytes_read, sequence_,
num_write_ops_per_thread_, read_hits)(); num_write_ops_per_thread_, read_hits)();

@ -33,14 +33,16 @@
#pragma once #pragma once
#include <assert.h> #include <assert.h>
#include <stdlib.h> #include <stdlib.h>
#include <atomic> #include <atomic>
#include "memory/allocator.h" #include "memory/allocator.h"
#include "port/port.h" #include "port/port.h"
#include "util/random.h" #include "util/random.h"
namespace ROCKSDB_NAMESPACE { namespace ROCKSDB_NAMESPACE {
template<typename Key, class Comparator> template <typename Key, class Comparator>
class SkipList { class SkipList {
private: private:
struct Node; struct Node;
@ -119,7 +121,7 @@ class SkipList {
// Immutable after construction // Immutable after construction
Comparator const compare_; Comparator const compare_;
Allocator* const allocator_; // Allocator used for allocations of nodes Allocator* const allocator_; // Allocator used for allocations of nodes
Node* const head_; Node* const head_;
@ -164,9 +166,9 @@ class SkipList {
}; };
// Implementation details follow // Implementation details follow
template<typename Key, class Comparator> template <typename Key, class Comparator>
struct SkipList<Key, Comparator>::Node { struct SkipList<Key, Comparator>::Node {
explicit Node(const Key& k) : key(k) { } explicit Node(const Key& k) : key(k) {}
Key const key; Key const key;
@ -200,43 +202,43 @@ struct SkipList<Key, Comparator>::Node {
std::atomic<Node*> next_[1]; std::atomic<Node*> next_[1];
}; };
template<typename Key, class Comparator> template <typename Key, class Comparator>
typename SkipList<Key, Comparator>::Node* typename SkipList<Key, Comparator>::Node* SkipList<Key, Comparator>::NewNode(
SkipList<Key, Comparator>::NewNode(const Key& key, int height) { const Key& key, int height) {
char* mem = allocator_->AllocateAligned( char* mem = allocator_->AllocateAligned(
sizeof(Node) + sizeof(std::atomic<Node*>) * (height - 1)); sizeof(Node) + sizeof(std::atomic<Node*>) * (height - 1));
return new (mem) Node(key); return new (mem) Node(key);
} }
template<typename Key, class Comparator> template <typename Key, class Comparator>
inline SkipList<Key, Comparator>::Iterator::Iterator(const SkipList* list) { inline SkipList<Key, Comparator>::Iterator::Iterator(const SkipList* list) {
SetList(list); SetList(list);
} }
template<typename Key, class Comparator> template <typename Key, class Comparator>
inline void SkipList<Key, Comparator>::Iterator::SetList(const SkipList* list) { inline void SkipList<Key, Comparator>::Iterator::SetList(const SkipList* list) {
list_ = list; list_ = list;
node_ = nullptr; node_ = nullptr;
} }
template<typename Key, class Comparator> template <typename Key, class Comparator>
inline bool SkipList<Key, Comparator>::Iterator::Valid() const { inline bool SkipList<Key, Comparator>::Iterator::Valid() const {
return node_ != nullptr; return node_ != nullptr;
} }
template<typename Key, class Comparator> template <typename Key, class Comparator>
inline const Key& SkipList<Key, Comparator>::Iterator::key() const { inline const Key& SkipList<Key, Comparator>::Iterator::key() const {
assert(Valid()); assert(Valid());
return node_->key; return node_->key;
} }
template<typename Key, class Comparator> template <typename Key, class Comparator>
inline void SkipList<Key, Comparator>::Iterator::Next() { inline void SkipList<Key, Comparator>::Iterator::Next() {
assert(Valid()); assert(Valid());
node_ = node_->Next(0); node_ = node_->Next(0);
} }
template<typename Key, class Comparator> template <typename Key, class Comparator>
inline void SkipList<Key, Comparator>::Iterator::Prev() { inline void SkipList<Key, Comparator>::Iterator::Prev() {
// Instead of using explicit "prev" links, we just search for the // Instead of using explicit "prev" links, we just search for the
// last node that falls before key. // last node that falls before key.
@ -247,7 +249,7 @@ inline void SkipList<Key, Comparator>::Iterator::Prev() {
} }
} }
template<typename Key, class Comparator> template <typename Key, class Comparator>
inline void SkipList<Key, Comparator>::Iterator::Seek(const Key& target) { inline void SkipList<Key, Comparator>::Iterator::Seek(const Key& target) {
node_ = list_->FindGreaterOrEqual(target); node_ = list_->FindGreaterOrEqual(target);
} }
@ -269,7 +271,7 @@ inline void SkipList<Key, Comparator>::Iterator::SeekToFirst() {
node_ = list_->head_->Next(0); node_ = list_->head_->Next(0);
} }
template<typename Key, class Comparator> template <typename Key, class Comparator>
inline void SkipList<Key, Comparator>::Iterator::SeekToLast() { inline void SkipList<Key, Comparator>::Iterator::SeekToLast() {
node_ = list_->FindLast(); node_ = list_->FindLast();
if (node_ == list_->head_) { if (node_ == list_->head_) {
@ -277,7 +279,7 @@ inline void SkipList<Key, Comparator>::Iterator::SeekToLast() {
} }
} }
template<typename Key, class Comparator> template <typename Key, class Comparator>
int SkipList<Key, Comparator>::RandomHeight() { int SkipList<Key, Comparator>::RandomHeight() {
auto rnd = Random::GetTLSInstance(); auto rnd = Random::GetTLSInstance();
@ -291,15 +293,15 @@ int SkipList<Key, Comparator>::RandomHeight() {
return height; return height;
} }
template<typename Key, class Comparator> template <typename Key, class Comparator>
bool SkipList<Key, Comparator>::KeyIsAfterNode(const Key& key, Node* n) const { bool SkipList<Key, Comparator>::KeyIsAfterNode(const Key& key, Node* n) const {
// nullptr n is considered infinite // nullptr n is considered infinite
return (n != nullptr) && (compare_(n->key, key) < 0); return (n != nullptr) && (compare_(n->key, key) < 0);
} }
template<typename Key, class Comparator> template <typename Key, class Comparator>
typename SkipList<Key, Comparator>::Node* SkipList<Key, Comparator>:: typename SkipList<Key, Comparator>::Node*
FindGreaterOrEqual(const Key& key) const { SkipList<Key, Comparator>::FindGreaterOrEqual(const Key& key) const {
// Note: It looks like we could reduce duplication by implementing // Note: It looks like we could reduce duplication by implementing
// this function as FindLessThan(key)->Next(0), but we wouldn't be able // this function as FindLessThan(key)->Next(0), but we wouldn't be able
// to exit early on equality and the result wouldn't even be correct. // to exit early on equality and the result wouldn't even be correct.
@ -315,8 +317,8 @@ typename SkipList<Key, Comparator>::Node* SkipList<Key, Comparator>::
assert(x == head_ || next == nullptr || KeyIsAfterNode(next->key, x)); assert(x == head_ || next == nullptr || KeyIsAfterNode(next->key, x));
// Make sure we haven't overshot during our search // Make sure we haven't overshot during our search
assert(x == head_ || KeyIsAfterNode(key, x)); assert(x == head_ || KeyIsAfterNode(key, x));
int cmp = (next == nullptr || next == last_bigger) int cmp =
? 1 : compare_(next->key, key); (next == nullptr || next == last_bigger) ? 1 : compare_(next->key, key);
if (cmp == 0 || (cmp > 0 && level == 0)) { if (cmp == 0 || (cmp > 0 && level == 0)) {
return next; return next;
} else if (cmp < 0) { } else if (cmp < 0) {
@ -330,7 +332,7 @@ typename SkipList<Key, Comparator>::Node* SkipList<Key, Comparator>::
} }
} }
template<typename Key, class Comparator> template <typename Key, class Comparator>
typename SkipList<Key, Comparator>::Node* typename SkipList<Key, Comparator>::Node*
SkipList<Key, Comparator>::FindLessThan(const Key& key, Node** prev) const { SkipList<Key, Comparator>::FindLessThan(const Key& key, Node** prev) const {
Node* x = head_; Node* x = head_;
@ -360,7 +362,7 @@ SkipList<Key, Comparator>::FindLessThan(const Key& key, Node** prev) const {
} }
} }
template<typename Key, class Comparator> template <typename Key, class Comparator>
typename SkipList<Key, Comparator>::Node* SkipList<Key, Comparator>::FindLast() typename SkipList<Key, Comparator>::Node* SkipList<Key, Comparator>::FindLast()
const { const {
Node* x = head_; Node* x = head_;
@ -424,14 +426,14 @@ SkipList<Key, Comparator>::SkipList(const Comparator cmp, Allocator* allocator,
// prev_ does not need to be freed, as its life cycle is tied up with // prev_ does not need to be freed, as its life cycle is tied up with
// the allocator as a whole. // the allocator as a whole.
prev_ = reinterpret_cast<Node**>( prev_ = reinterpret_cast<Node**>(
allocator_->AllocateAligned(sizeof(Node*) * kMaxHeight_)); allocator_->AllocateAligned(sizeof(Node*) * kMaxHeight_));
for (int i = 0; i < kMaxHeight_; i++) { for (int i = 0; i < kMaxHeight_; i++) {
head_->SetNext(i, nullptr); head_->SetNext(i, nullptr);
prev_[i] = head_; prev_[i] = head_;
} }
} }
template<typename Key, class Comparator> template <typename Key, class Comparator>
void SkipList<Key, Comparator>::Insert(const Key& key) { void SkipList<Key, Comparator>::Insert(const Key& key) {
// fast path for sequential insertion // fast path for sequential insertion
if (!KeyIsAfterNode(key, prev_[0]->NoBarrier_Next(0)) && if (!KeyIsAfterNode(key, prev_[0]->NoBarrier_Next(0)) &&
@ -460,7 +462,7 @@ void SkipList<Key, Comparator>::Insert(const Key& key) {
for (int i = GetMaxHeight(); i < height; i++) { for (int i = GetMaxHeight(); i < height; i++) {
prev_[i] = head_; prev_[i] = head_;
} }
//fprintf(stderr, "Change height from %d to %d\n", max_height_, height); // fprintf(stderr, "Change height from %d to %d\n", max_height_, height);
// It is ok to mutate max_height_ without any synchronization // It is ok to mutate max_height_ without any synchronization
// with concurrent readers. A concurrent reader that observes // with concurrent readers. A concurrent reader that observes
@ -483,7 +485,7 @@ void SkipList<Key, Comparator>::Insert(const Key& key) {
prev_height_ = height; prev_height_ = height;
} }
template<typename Key, class Comparator> template <typename Key, class Comparator>
bool SkipList<Key, Comparator>::Contains(const Key& key) const { bool SkipList<Key, Comparator>::Contains(const Key& key) const {
Node* x = FindGreaterOrEqual(key); Node* x = FindGreaterOrEqual(key);
if (x != nullptr && Equal(key, x->key)) { if (x != nullptr && Equal(key, x->key)) {

@ -8,7 +8,9 @@
// found in the LICENSE file. See the AUTHORS file for names of contributors. // found in the LICENSE file. See the AUTHORS file for names of contributors.
#include "memtable/skiplist.h" #include "memtable/skiplist.h"
#include <set> #include <set>
#include "memory/arena.h" #include "memory/arena.h"
#include "rocksdb/env.h" #include "rocksdb/env.h"
#include "test_util/testharness.h" #include "test_util/testharness.h"
@ -169,7 +171,7 @@ class ConcurrentTest {
static uint64_t hash(Key key) { return key & 0xff; } static uint64_t hash(Key key) { return key & 0xff; }
static uint64_t HashNumbers(uint64_t k, uint64_t g) { static uint64_t HashNumbers(uint64_t k, uint64_t g) {
uint64_t data[2] = { k, g }; uint64_t data[2] = {k, g};
return Hash(reinterpret_cast<char*>(data), sizeof(data), 0); return Hash(reinterpret_cast<char*>(data), sizeof(data), 0);
} }
@ -311,11 +313,7 @@ class TestState {
int seed_; int seed_;
std::atomic<bool> quit_flag_; std::atomic<bool> quit_flag_;
enum ReaderState { enum ReaderState { STARTING, RUNNING, DONE };
STARTING,
RUNNING,
DONE
};
explicit TestState(int s) explicit TestState(int s)
: seed_(s), quit_flag_(false), state_(STARTING), state_cv_(&mu_) {} : seed_(s), quit_flag_(false), state_(STARTING), state_cv_(&mu_) {}

@ -21,74 +21,76 @@ class SkipListRep : public MemTableRep {
const size_t lookahead_; const size_t lookahead_;
friend class LookaheadIterator; friend class LookaheadIterator;
public:
explicit SkipListRep(const MemTableRep::KeyComparator& compare, public:
Allocator* allocator, const SliceTransform* transform, explicit SkipListRep(const MemTableRep::KeyComparator& compare,
const size_t lookahead) Allocator* allocator, const SliceTransform* transform,
: MemTableRep(allocator), const size_t lookahead)
skip_list_(compare, allocator), : MemTableRep(allocator),
cmp_(compare), skip_list_(compare, allocator),
transform_(transform), cmp_(compare),
lookahead_(lookahead) {} transform_(transform),
lookahead_(lookahead) {}
KeyHandle Allocate(const size_t len, char** buf) override {
*buf = skip_list_.AllocateKey(len); KeyHandle Allocate(const size_t len, char** buf) override {
return static_cast<KeyHandle>(*buf); *buf = skip_list_.AllocateKey(len);
} return static_cast<KeyHandle>(*buf);
}
// Insert key into the list. // Insert key into the list.
// REQUIRES: nothing that compares equal to key is currently in the list. // REQUIRES: nothing that compares equal to key is currently in the list.
void Insert(KeyHandle handle) override { void Insert(KeyHandle handle) override {
skip_list_.Insert(static_cast<char*>(handle)); skip_list_.Insert(static_cast<char*>(handle));
} }
bool InsertKey(KeyHandle handle) override { bool InsertKey(KeyHandle handle) override {
return skip_list_.Insert(static_cast<char*>(handle)); return skip_list_.Insert(static_cast<char*>(handle));
} }
void InsertWithHint(KeyHandle handle, void** hint) override { void InsertWithHint(KeyHandle handle, void** hint) override {
skip_list_.InsertWithHint(static_cast<char*>(handle), hint); skip_list_.InsertWithHint(static_cast<char*>(handle), hint);
} }
bool InsertKeyWithHint(KeyHandle handle, void** hint) override { bool InsertKeyWithHint(KeyHandle handle, void** hint) override {
return skip_list_.InsertWithHint(static_cast<char*>(handle), hint); return skip_list_.InsertWithHint(static_cast<char*>(handle), hint);
} }
void InsertWithHintConcurrently(KeyHandle handle, void** hint) override { void InsertWithHintConcurrently(KeyHandle handle, void** hint) override {
skip_list_.InsertWithHintConcurrently(static_cast<char*>(handle), hint); skip_list_.InsertWithHintConcurrently(static_cast<char*>(handle), hint);
} }
bool InsertKeyWithHintConcurrently(KeyHandle handle, void** hint) override { bool InsertKeyWithHintConcurrently(KeyHandle handle, void** hint) override {
return skip_list_.InsertWithHintConcurrently(static_cast<char*>(handle), return skip_list_.InsertWithHintConcurrently(static_cast<char*>(handle),
hint); hint);
} }
void InsertConcurrently(KeyHandle handle) override { void InsertConcurrently(KeyHandle handle) override {
skip_list_.InsertConcurrently(static_cast<char*>(handle)); skip_list_.InsertConcurrently(static_cast<char*>(handle));
} }
bool InsertKeyConcurrently(KeyHandle handle) override { bool InsertKeyConcurrently(KeyHandle handle) override {
return skip_list_.InsertConcurrently(static_cast<char*>(handle)); return skip_list_.InsertConcurrently(static_cast<char*>(handle));
} }
// Returns true iff an entry that compares equal to key is in the list. // Returns true iff an entry that compares equal to key is in the list.
bool Contains(const char* key) const override { bool Contains(const char* key) const override {
return skip_list_.Contains(key); return skip_list_.Contains(key);
} }
size_t ApproximateMemoryUsage() override { size_t ApproximateMemoryUsage() override {
// All memory is allocated through allocator; nothing to report here // All memory is allocated through allocator; nothing to report here
return 0; return 0;
} }
void Get(const LookupKey& k, void* callback_args, void Get(const LookupKey& k, void* callback_args,
bool (*callback_func)(void* arg, const char* entry)) override { bool (*callback_func)(void* arg, const char* entry)) override {
SkipListRep::Iterator iter(&skip_list_); SkipListRep::Iterator iter(&skip_list_);
Slice dummy_slice; Slice dummy_slice;
for (iter.Seek(dummy_slice, k.memtable_key().data()); for (iter.Seek(dummy_slice, k.memtable_key().data());
iter.Valid() && callback_func(callback_args, iter.key()); iter.Next()) { iter.Valid() && callback_func(callback_args, iter.key());
} iter.Next()) {
} }
}
uint64_t ApproximateNumEntries(const Slice& start_ikey, uint64_t ApproximateNumEntries(const Slice& start_ikey,
const Slice& end_ikey) override { const Slice& end_ikey) override {
@ -218,7 +220,7 @@ public:
void SeekToLast() override { iter_.SeekToLast(); } void SeekToLast() override { iter_.SeekToLast(); }
protected: protected:
std::string tmp_; // For passing to EncodeKey std::string tmp_; // For passing to EncodeKey
}; };
// Iterator over the contents of a skip list which also keeps track of the // Iterator over the contents of a skip list which also keeps track of the
@ -227,8 +229,8 @@ public:
// the target key hasn't been found. // the target key hasn't been found.
class LookaheadIterator : public MemTableRep::Iterator { class LookaheadIterator : public MemTableRep::Iterator {
public: public:
explicit LookaheadIterator(const SkipListRep& rep) : explicit LookaheadIterator(const SkipListRep& rep)
rep_(rep), iter_(&rep_.skip_list_), prev_(iter_) {} : rep_(rep), iter_(&rep_.skip_list_), prev_(iter_) {}
~LookaheadIterator() override {} ~LookaheadIterator() override {}
@ -271,9 +273,9 @@ public:
} }
void Seek(const Slice& internal_key, const char* memtable_key) override { void Seek(const Slice& internal_key, const char* memtable_key) override {
const char *encoded_key = const char* encoded_key = (memtable_key != nullptr)
(memtable_key != nullptr) ? ? memtable_key
memtable_key : EncodeKey(&tmp_, internal_key); : EncodeKey(&tmp_, internal_key);
if (prev_.Valid() && rep_.cmp_(encoded_key, prev_.key()) >= 0) { if (prev_.Valid() && rep_.cmp_(encoded_key, prev_.key()) >= 0) {
// prev_.key() is smaller or equal to our target key; do a quick // prev_.key() is smaller or equal to our target key; do a quick
@ -313,7 +315,7 @@ public:
} }
protected: protected:
std::string tmp_; // For passing to EncodeKey std::string tmp_; // For passing to EncodeKey
private: private:
const SkipListRep& rep_; const SkipListRep& rep_;
@ -323,19 +325,20 @@ public:
MemTableRep::Iterator* GetIterator(Arena* arena = nullptr) override { MemTableRep::Iterator* GetIterator(Arena* arena = nullptr) override {
if (lookahead_ > 0) { if (lookahead_ > 0) {
void *mem = void* mem =
arena ? arena->AllocateAligned(sizeof(SkipListRep::LookaheadIterator)) arena ? arena->AllocateAligned(sizeof(SkipListRep::LookaheadIterator))
: operator new(sizeof(SkipListRep::LookaheadIterator)); :
operator new(sizeof(SkipListRep::LookaheadIterator));
return new (mem) SkipListRep::LookaheadIterator(*this); return new (mem) SkipListRep::LookaheadIterator(*this);
} else { } else {
void *mem = void* mem = arena ? arena->AllocateAligned(sizeof(SkipListRep::Iterator))
arena ? arena->AllocateAligned(sizeof(SkipListRep::Iterator)) :
: operator new(sizeof(SkipListRep::Iterator)); operator new(sizeof(SkipListRep::Iterator));
return new (mem) SkipListRep::Iterator(&skip_list_); return new (mem) SkipListRep::Iterator(&skip_list_);
} }
} }
}; };
} } // namespace
static std::unordered_map<std::string, OptionTypeInfo> skiplist_factory_info = { static std::unordered_map<std::string, OptionTypeInfo> skiplist_factory_info = {
#ifndef ROCKSDB_LITE #ifndef ROCKSDB_LITE

@ -29,5 +29,5 @@ struct Compare : private Base {
} }
}; };
} } // namespace stl_wrappers
} // namespace ROCKSDB_NAMESPACE } // namespace ROCKSDB_NAMESPACE

@ -48,13 +48,14 @@ class VectorRep : public MemTableRep {
std::shared_ptr<std::vector<const char*>> bucket_; std::shared_ptr<std::vector<const char*>> bucket_;
std::vector<const char*>::const_iterator mutable cit_; std::vector<const char*>::const_iterator mutable cit_;
const KeyComparator& compare_; const KeyComparator& compare_;
std::string tmp_; // For passing to EncodeKey std::string tmp_; // For passing to EncodeKey
bool mutable sorted_; bool mutable sorted_;
void DoSort() const; void DoSort() const;
public: public:
explicit Iterator(class VectorRep* vrep, explicit Iterator(class VectorRep* vrep,
std::shared_ptr<std::vector<const char*>> bucket, std::shared_ptr<std::vector<const char*>> bucket,
const KeyComparator& compare); const KeyComparator& compare);
// Initialize an iterator over the specified collection. // Initialize an iterator over the specified collection.
// The returned iterator is not valid. // The returned iterator is not valid.
@ -123,12 +124,10 @@ void VectorRep::MarkReadOnly() {
} }
size_t VectorRep::ApproximateMemoryUsage() { size_t VectorRep::ApproximateMemoryUsage() {
return return sizeof(bucket_) + sizeof(*bucket_) +
sizeof(bucket_) + sizeof(*bucket_) + bucket_->size() *
bucket_->size() * sizeof(
sizeof( std::remove_reference<decltype(*bucket_)>::type::value_type);
std::remove_reference<decltype(*bucket_)>::type::value_type
);
} }
VectorRep::VectorRep(const KeyComparator& compare, Allocator* allocator, VectorRep::VectorRep(const KeyComparator& compare, Allocator* allocator,
@ -142,13 +141,13 @@ VectorRep::VectorRep(const KeyComparator& compare, Allocator* allocator,
} }
VectorRep::Iterator::Iterator(class VectorRep* vrep, VectorRep::Iterator::Iterator(class VectorRep* vrep,
std::shared_ptr<std::vector<const char*>> bucket, std::shared_ptr<std::vector<const char*>> bucket,
const KeyComparator& compare) const KeyComparator& compare)
: vrep_(vrep), : vrep_(vrep),
bucket_(bucket), bucket_(bucket),
cit_(bucket_->end()), cit_(bucket_->end()),
compare_(compare), compare_(compare),
sorted_(false) { } sorted_(false) {}
void VectorRep::Iterator::DoSort() const { void VectorRep::Iterator::DoSort() const {
// vrep is non-null means that we are working on an immutable memtable // vrep is non-null means that we are working on an immutable memtable
@ -216,12 +215,11 @@ void VectorRep::Iterator::Seek(const Slice& user_key,
// Do binary search to find first value not less than the target // Do binary search to find first value not less than the target
const char* encoded_key = const char* encoded_key =
(memtable_key != nullptr) ? memtable_key : EncodeKey(&tmp_, user_key); (memtable_key != nullptr) ? memtable_key : EncodeKey(&tmp_, user_key);
cit_ = std::equal_range(bucket_->begin(), cit_ = std::equal_range(bucket_->begin(), bucket_->end(), encoded_key,
bucket_->end(), [this](const char* a, const char* b) {
encoded_key,
[this] (const char* a, const char* b) {
return compare_(a, b) < 0; return compare_(a, b) < 0;
}).first; })
.first;
} }
// Advance to the first entry with a key <= target // Advance to the first entry with a key <= target
@ -282,7 +280,7 @@ MemTableRep::Iterator* VectorRep::GetIterator(Arena* arena) {
} }
} else { } else {
std::shared_ptr<Bucket> tmp; std::shared_ptr<Bucket> tmp;
tmp.reset(new Bucket(*bucket_)); // make a copy tmp.reset(new Bucket(*bucket_)); // make a copy
if (arena == nullptr) { if (arena == nullptr) {
return new Iterator(nullptr, tmp, compare_); return new Iterator(nullptr, tmp, compare_);
} else { } else {
@ -290,7 +288,7 @@ MemTableRep::Iterator* VectorRep::GetIterator(Arena* arena) {
} }
} }
} }
} // anon namespace } // namespace
static std::unordered_map<std::string, OptionTypeInfo> vector_rep_table_info = { static std::unordered_map<std::string, OptionTypeInfo> vector_rep_table_info = {
{"count", {"count",

@ -8,6 +8,7 @@
// found in the LICENSE file. See the AUTHORS file for names of contributors. // found in the LICENSE file. See the AUTHORS file for names of contributors.
#include "rocksdb/write_buffer_manager.h" #include "rocksdb/write_buffer_manager.h"
#include "test_util/testharness.h" #include "test_util/testharness.h"
namespace ROCKSDB_NAMESPACE { namespace ROCKSDB_NAMESPACE {

Loading…
Cancel
Save