Merge pull request #256 from iSynaptic/gh-230

Adding format checks to CI build...
Branch: master
Authored by Jordan Terrell 6 years ago (committed via GitHub)
commit 5b8686487d
9 changed files:
  1. .travis.yml (5 changed lines)
  2. librocksdb-sys/build.rs (3 changed lines)
  3. src/backup.rs (2 changed lines)
  4. src/compaction_filter.rs (3 changed lines)
  5. src/db.rs (238 changed lines)
  6. src/lib.rs (4 changed lines)
  7. tests/test_compationfilter.rs (2 changed lines)
  8. tests/test_db.rs (12 changed lines)
  9. tests/test_iterator.rs (25 changed lines)

@ -22,7 +22,12 @@ addons:
- libclang-3.9-dev - libclang-3.9-dev
- clang-3.9 - clang-3.9
install:
- rustup component add rustfmt
- rustfmt -V
script: script:
- cargo fmt --all -- --check
- cargo test --manifest-path=librocksdb-sys/Cargo.toml - cargo test --manifest-path=librocksdb-sys/Cargo.toml
- cargo test - cargo test
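For readability, the right-hand (new) side of this hunk gives roughly the following `.travis.yml` steps; surrounding keys such as the `addons` section are elided:

```yaml
install:
  - rustup component add rustfmt
  - rustfmt -V
script:
  - cargo fmt --all -- --check
  - cargo test --manifest-path=librocksdb-sys/Cargo.toml
  - cargo test
```

`cargo fmt --all -- --check` exits with a non-zero status whenever any crate in the workspace deviates from rustfmt's output, so the build now fails on formatting drift instead of silently accepting it.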

@ -119,7 +119,8 @@ fn build_rocksdb() {
.filter(|file| match *file { .filter(|file| match *file {
"port/port_posix.cc" | "env/env_posix.cc" | "env/io_posix.cc" => false, "port/port_posix.cc" | "env/env_posix.cc" | "env/io_posix.cc" => false,
_ => true, _ => true,
}).collect::<Vec<&'static str>>(); })
.collect::<Vec<&'static str>>();
// Add Windows-specific sources // Add Windows-specific sources
lib_sources.push("port/win/port_win.cc"); lib_sources.push("port/win/port_win.cc");

@ -46,7 +46,7 @@ impl BackupEngine {
"Failed to convert path to CString \ "Failed to convert path to CString \
when opening backup engine" when opening backup engine"
.to_owned(), .to_owned(),
)) ));
} }
}; };

@ -45,7 +45,8 @@ impl<F> CompactionFilterFn for F
where where
F: FnMut(u32, &[u8], &[u8]) -> Decision, F: FnMut(u32, &[u8], &[u8]) -> Decision,
F: Send + 'static, F: Send + 'static,
{} {
}
pub struct CompactionFilterCallback<F> pub struct CompactionFilterCallback<F>
where where

@ -154,7 +154,7 @@ pub struct Snapshot<'a> {
/// ``` /// ```
pub struct DBRawIterator<'a> { pub struct DBRawIterator<'a> {
inner: *mut ffi::rocksdb_iterator_t, inner: *mut ffi::rocksdb_iterator_t,
db: PhantomData<&'a DB> db: PhantomData<&'a DB>,
} }
/// An iterator over a database or column family, with specifiable /// An iterator over a database or column family, with specifiable
@ -214,7 +214,7 @@ impl<'a> DBRawIterator<'a> {
unsafe { unsafe {
DBRawIterator { DBRawIterator {
inner: ffi::rocksdb_create_iterator(db.inner, readopts.inner), inner: ffi::rocksdb_create_iterator(db.inner, readopts.inner),
db: PhantomData db: PhantomData,
} }
} }
} }
@ -227,7 +227,7 @@ impl<'a> DBRawIterator<'a> {
unsafe { unsafe {
Ok(DBRawIterator { Ok(DBRawIterator {
inner: ffi::rocksdb_create_iterator_cf(db.inner, readopts.inner, cf_handle.inner), inner: ffi::rocksdb_create_iterator_cf(db.inner, readopts.inner, cf_handle.inner),
db: PhantomData db: PhantomData,
}) })
} }
} }
@ -603,7 +603,11 @@ impl<'a> Snapshot<'a> {
DBRawIterator::new(self.db, &readopts) DBRawIterator::new(self.db, &readopts)
} }
pub fn raw_iterator_cf_opt(&self, cf_handle: ColumnFamily, mut readopts: ReadOptions) -> Result<DBRawIterator, Error> { pub fn raw_iterator_cf_opt(
&self,
cf_handle: ColumnFamily,
mut readopts: ReadOptions,
) -> Result<DBRawIterator, Error> {
readopts.set_snapshot(self); readopts.set_snapshot(self);
DBRawIterator::new_cf(self.db, cf_handle, &readopts) DBRawIterator::new_cf(self.db, cf_handle, &readopts)
} }
@ -613,17 +617,30 @@ impl<'a> Snapshot<'a> {
self.get_opt(key, readopts) self.get_opt(key, readopts)
} }
pub fn get_cf<K: AsRef<[u8]>>(&self, cf: ColumnFamily, key: K) -> Result<Option<DBVector>, Error> { pub fn get_cf<K: AsRef<[u8]>>(
&self,
cf: ColumnFamily,
key: K,
) -> Result<Option<DBVector>, Error> {
let readopts = ReadOptions::default(); let readopts = ReadOptions::default();
self.get_cf_opt(cf, key.as_ref(), readopts) self.get_cf_opt(cf, key.as_ref(), readopts)
} }
pub fn get_opt<K: AsRef<[u8]>>(&self, key: K, mut readopts: ReadOptions) -> Result<Option<DBVector>, Error> { pub fn get_opt<K: AsRef<[u8]>>(
&self,
key: K,
mut readopts: ReadOptions,
) -> Result<Option<DBVector>, Error> {
readopts.set_snapshot(self); readopts.set_snapshot(self);
self.db.get_opt(key.as_ref(), &readopts) self.db.get_opt(key.as_ref(), &readopts)
} }
pub fn get_cf_opt<K: AsRef<[u8]>>(&self, cf: ColumnFamily, key: K, mut readopts: ReadOptions) -> Result<Option<DBVector>, Error> { pub fn get_cf_opt<K: AsRef<[u8]>>(
&self,
cf: ColumnFamily,
key: K,
mut readopts: ReadOptions,
) -> Result<Option<DBVector>, Error> {
readopts.set_snapshot(self); readopts.set_snapshot(self);
self.db.get_cf_opt(cf, key.as_ref(), &readopts) self.db.get_cf_opt(cf, key.as_ref(), &readopts)
} }
@ -666,11 +683,12 @@ impl DB {
/// Open a database with the given database options and column family names. /// Open a database with the given database options and column family names.
/// ///
/// Column families opened using this function will be created with default `Options`. /// Column families opened using this function will be created with default `Options`.
pub fn open_cf<P, I, N>(opts: &Options, path: P, cfs: I) -> Result<DB, Error> pub fn open_cf<P, I, N>(opts: &Options, path: P, cfs: I) -> Result<DB, Error>
where P: AsRef<Path>, where
I: IntoIterator<Item = N>, P: AsRef<Path>,
N: AsRef<str> { I: IntoIterator<Item = N>,
N: AsRef<str>,
{
let cfs = cfs let cfs = cfs
.into_iter() .into_iter()
.map(|name| ColumnFamilyDescriptor::new(name.as_ref(), Options::default())); .map(|name| ColumnFamilyDescriptor::new(name.as_ref(), Options::default()));
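As the doc comment above says, column families named in `open_cf` are created with default `Options`. A minimal usage sketch (the path name is hypothetical, and it assumes the database options are set to create missing column families):

```rust
extern crate rocksdb;

use rocksdb::{Options, DB};

fn main() {
    let mut opts = Options::default();
    opts.create_if_missing(true);
    // Assumption: without this, open_cf errors when "cf1" does not exist yet.
    opts.create_missing_column_families(true);

    // "cf1" is opened (or created) with default per-CF Options.
    let db = DB::open_cf(&opts, "_rust_rocksdb_open_cf_example", vec!["cf1"]).unwrap();

    let cf = db.cf_handle("cf1").expect("cf1 should be open");
    db.put_cf(cf, b"k1", b"v1").unwrap();
    assert!(db.get_cf(cf, b"k1").unwrap().is_some());
}
```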
@ -679,17 +697,12 @@ impl DB {
} }
/// Open a database with the given database options and column family descriptors. /// Open a database with the given database options and column family descriptors.
pub fn open_cf_descriptors<P, I>( pub fn open_cf_descriptors<P, I>(opts: &Options, path: P, cfs: I) -> Result<DB, Error>
opts: &Options, where
path: P, P: AsRef<Path>,
cfs: I, I: IntoIterator<Item = ColumnFamilyDescriptor>,
) -> Result<DB, Error> {
where P: AsRef<Path>, let cfs: Vec<_> = cfs.into_iter().collect();
I: IntoIterator<Item = ColumnFamilyDescriptor> {
let cfs: Vec<_> = cfs
.into_iter()
.collect();
let path = path.as_ref(); let path = path.as_ref();
let cpath = match CString::new(path.to_string_lossy().as_bytes()) { let cpath = match CString::new(path.to_string_lossy().as_bytes()) {
@ -699,13 +712,14 @@ impl DB {
"Failed to convert path to CString \ "Failed to convert path to CString \
when opening DB." when opening DB."
.to_owned(), .to_owned(),
)) ));
} }
}; };
if let Err(e) = fs::create_dir_all(&path) { if let Err(e) = fs::create_dir_all(&path) {
return Err(Error::new(format!( return Err(Error::new(format!(
"Failed to create RocksDB directory: `{:?}`.", e "Failed to create RocksDB directory: `{:?}`.",
e
))); )));
} }
@ -764,7 +778,8 @@ impl DB {
} }
for (n, h) in cfs_v.iter().zip(cfhandles) { for (n, h) in cfs_v.iter().zip(cfhandles) {
cf_map.write() cf_map
.write()
.map_err(|e| Error::new(e.to_string()))? .map_err(|e| Error::new(e.to_string()))?
.insert(n.name.clone(), h); .insert(n.name.clone(), h);
} }
@ -838,7 +853,11 @@ impl DB {
self.write_opt(batch, &wo) self.write_opt(batch, &wo)
} }
pub fn get_opt<K: AsRef<[u8]>>(&self, key: K, readopts: &ReadOptions) -> Result<Option<DBVector>, Error> { pub fn get_opt<K: AsRef<[u8]>>(
&self,
key: K,
readopts: &ReadOptions,
) -> Result<Option<DBVector>, Error> {
if readopts.inner.is_null() { if readopts.inner.is_null() {
return Err(Error::new( return Err(Error::new(
"Unable to create RocksDB read options. \ "Unable to create RocksDB read options. \
@ -911,7 +930,11 @@ impl DB {
} }
} }
pub fn get_cf<K: AsRef<[u8]>>(&self, cf: ColumnFamily, key: K) -> Result<Option<DBVector>, Error> { pub fn get_cf<K: AsRef<[u8]>>(
&self,
cf: ColumnFamily,
key: K,
) -> Result<Option<DBVector>, Error> {
self.get_cf_opt(cf, key.as_ref(), &ReadOptions::default()) self.get_cf_opt(cf, key.as_ref(), &ReadOptions::default())
} }
@ -923,7 +946,7 @@ impl DB {
"Failed to convert path to CString \ "Failed to convert path to CString \
when opening rocksdb" when opening rocksdb"
.to_owned(), .to_owned(),
)) ));
} }
}; };
let cf = unsafe { let cf = unsafe {
@ -933,10 +956,12 @@ impl DB {
cname.as_ptr(), cname.as_ptr(),
)); ));
self.cfs.write().map_err(|e| Error::new(e.to_string()))? self.cfs
.write()
.map_err(|e| Error::new(e.to_string()))?
.insert(name.to_string(), cf_handle); .insert(name.to_string(), cf_handle);
ColumnFamily { ColumnFamily {
inner: cf_handle, inner: cf_handle,
db: PhantomData, db: PhantomData,
} }
@ -945,29 +970,29 @@ impl DB {
} }
pub fn drop_cf(&self, name: &str) -> Result<(), Error> { pub fn drop_cf(&self, name: &str) -> Result<(), Error> {
if let Some(cf) = self.cfs.write().map_err(|e| Error::new(e.to_string()))? if let Some(cf) = self
.remove(name) { .cfs
.write()
.map_err(|e| Error::new(e.to_string()))?
.remove(name)
{
unsafe { unsafe {
ffi_try!(ffi::rocksdb_drop_column_family(self.inner, cf,)); ffi_try!(ffi::rocksdb_drop_column_family(self.inner, cf,));
} }
Ok(()) Ok(())
} else { } else {
Err(Error::new( Err(Error::new(
format!("Invalid column family: {}", name).to_owned() format!("Invalid column family: {}", name).to_owned(),
)) ))
} }
} }
/// Return the underlying column family handle. /// Return the underlying column family handle.
pub fn cf_handle(&self, name: &str) -> Option<ColumnFamily> { pub fn cf_handle(&self, name: &str) -> Option<ColumnFamily> {
self.cfs self.cfs.read().ok()?.get(name).map(|h| ColumnFamily {
.read() inner: *h,
.ok()? db: PhantomData,
.get(name) })
.map(|h| ColumnFamily {
inner: *h,
db: PhantomData
})
} }
pub fn iterator(&self, mode: IteratorMode) -> DBIterator { pub fn iterator(&self, mode: IteratorMode) -> DBIterator {
@ -991,7 +1016,11 @@ impl DB {
pub fn prefix_iterator<P: AsRef<[u8]>>(&self, prefix: P) -> DBIterator { pub fn prefix_iterator<P: AsRef<[u8]>>(&self, prefix: P) -> DBIterator {
let mut opts = ReadOptions::default(); let mut opts = ReadOptions::default();
opts.set_prefix_same_as_start(true); opts.set_prefix_same_as_start(true);
DBIterator::new(self, &opts, IteratorMode::From(prefix.as_ref(), Direction::Forward)) DBIterator::new(
self,
&opts,
IteratorMode::From(prefix.as_ref(), Direction::Forward),
)
} }
pub fn iterator_cf( pub fn iterator_cf(
@ -1016,7 +1045,7 @@ impl DB {
pub fn prefix_iterator_cf<P: AsRef<[u8]>>( pub fn prefix_iterator_cf<P: AsRef<[u8]>>(
&self, &self,
cf_handle: ColumnFamily, cf_handle: ColumnFamily,
prefix: P prefix: P,
) -> Result<DBIterator, Error> { ) -> Result<DBIterator, Error> {
let mut opts = ReadOptions::default(); let mut opts = ReadOptions::default();
opts.set_prefix_same_as_start(true); opts.set_prefix_same_as_start(true);
@ -1043,14 +1072,14 @@ impl DB {
} }
pub fn put_opt<K, V>(&self, key: K, value: V, writeopts: &WriteOptions) -> Result<(), Error> pub fn put_opt<K, V>(&self, key: K, value: V, writeopts: &WriteOptions) -> Result<(), Error>
where K: AsRef<[u8]>, where
V: AsRef<[u8]> { K: AsRef<[u8]>,
V: AsRef<[u8]>,
{
let key = key.as_ref(); let key = key.as_ref();
let value = value.as_ref(); let value = value.as_ref();
unsafe { unsafe {
ffi_try!(ffi::rocksdb_put( ffi_try!(ffi::rocksdb_put(
self.inner, self.inner,
writeopts.inner, writeopts.inner,
@ -1069,15 +1098,15 @@ impl DB {
key: K, key: K,
value: V, value: V,
writeopts: &WriteOptions, writeopts: &WriteOptions,
) -> Result<(), Error> ) -> Result<(), Error>
where K: AsRef<[u8]>, where
V: AsRef<[u8]> { K: AsRef<[u8]>,
V: AsRef<[u8]>,
{
let key = key.as_ref(); let key = key.as_ref();
let value = value.as_ref(); let value = value.as_ref();
unsafe { unsafe {
ffi_try!(ffi::rocksdb_put_cf( ffi_try!(ffi::rocksdb_put_cf(
self.inner, self.inner,
writeopts.inner, writeopts.inner,
@ -1091,15 +1120,11 @@ impl DB {
} }
} }
pub fn merge_opt<K, V>( pub fn merge_opt<K, V>(&self, key: K, value: V, writeopts: &WriteOptions) -> Result<(), Error>
&self, where
key: K, K: AsRef<[u8]>,
value: V, V: AsRef<[u8]>,
writeopts: &WriteOptions, {
) -> Result<(), Error>
where K: AsRef<[u8]>,
V: AsRef<[u8]> {
let key = key.as_ref(); let key = key.as_ref();
let value = value.as_ref(); let value = value.as_ref();
@ -1123,14 +1148,14 @@ impl DB {
value: V, value: V,
writeopts: &WriteOptions, writeopts: &WriteOptions,
) -> Result<(), Error> ) -> Result<(), Error>
where K: AsRef<[u8]>, where
V: AsRef<[u8]> { K: AsRef<[u8]>,
V: AsRef<[u8]>,
{
let key = key.as_ref(); let key = key.as_ref();
let value = value.as_ref(); let value = value.as_ref();
unsafe { unsafe {
ffi_try!(ffi::rocksdb_merge_cf( ffi_try!(ffi::rocksdb_merge_cf(
self.inner, self.inner,
writeopts.inner, writeopts.inner,
@ -1144,9 +1169,13 @@ impl DB {
} }
} }
pub fn delete_opt<K: AsRef<[u8]>>(&self, key: K, writeopts: &WriteOptions) -> Result<(), Error> { pub fn delete_opt<K: AsRef<[u8]>>(
&self,
key: K,
writeopts: &WriteOptions,
) -> Result<(), Error> {
let key = key.as_ref(); let key = key.as_ref();
unsafe { unsafe {
ffi_try!(ffi::rocksdb_delete( ffi_try!(ffi::rocksdb_delete(
self.inner, self.inner,
@ -1164,7 +1193,6 @@ impl DB {
key: K, key: K,
writeopts: &WriteOptions, writeopts: &WriteOptions,
) -> Result<(), Error> { ) -> Result<(), Error> {
let key = key.as_ref(); let key = key.as_ref();
unsafe { unsafe {
@ -1180,30 +1208,34 @@ impl DB {
} }
pub fn put<K, V>(&self, key: K, value: V) -> Result<(), Error> pub fn put<K, V>(&self, key: K, value: V) -> Result<(), Error>
where K: AsRef<[u8]>, where
V: AsRef<[u8]> { K: AsRef<[u8]>,
V: AsRef<[u8]>,
{
self.put_opt(key.as_ref(), value.as_ref(), &WriteOptions::default()) self.put_opt(key.as_ref(), value.as_ref(), &WriteOptions::default())
} }
pub fn put_cf<K, V>(&self, cf: ColumnFamily, key: K, value: V) -> Result<(), Error> pub fn put_cf<K, V>(&self, cf: ColumnFamily, key: K, value: V) -> Result<(), Error>
where K: AsRef<[u8]>, where
V: AsRef<[u8]> { K: AsRef<[u8]>,
V: AsRef<[u8]>,
{
self.put_cf_opt(cf, key.as_ref(), value.as_ref(), &WriteOptions::default()) self.put_cf_opt(cf, key.as_ref(), value.as_ref(), &WriteOptions::default())
} }
pub fn merge<K, V>(&self, key: K, value: V) -> Result<(), Error> pub fn merge<K, V>(&self, key: K, value: V) -> Result<(), Error>
where K: AsRef<[u8]>, where
V: AsRef<[u8]> { K: AsRef<[u8]>,
V: AsRef<[u8]>,
{
self.merge_opt(key.as_ref(), value.as_ref(), &WriteOptions::default()) self.merge_opt(key.as_ref(), value.as_ref(), &WriteOptions::default())
} }
pub fn merge_cf<K, V>(&self, cf: ColumnFamily, key: K, value: V) -> Result<(), Error> pub fn merge_cf<K, V>(&self, cf: ColumnFamily, key: K, value: V) -> Result<(), Error>
where K: AsRef<[u8]>, where
V: AsRef<[u8]> { K: AsRef<[u8]>,
V: AsRef<[u8]>,
{
self.merge_cf_opt(cf, key.as_ref(), value.as_ref(), &WriteOptions::default()) self.merge_cf_opt(cf, key.as_ref(), value.as_ref(), &WriteOptions::default())
} }
@ -1293,9 +1325,10 @@ impl WriteBatch {
/// Insert a value into the database under the given key. /// Insert a value into the database under the given key.
pub fn put<K, V>(&mut self, key: K, value: V) -> Result<(), Error> pub fn put<K, V>(&mut self, key: K, value: V) -> Result<(), Error>
where K: AsRef<[u8]>, where
V: AsRef<[u8]> { K: AsRef<[u8]>,
V: AsRef<[u8]>,
{
let key = key.as_ref(); let key = key.as_ref();
let value = value.as_ref(); let value = value.as_ref();
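The `WriteBatch::put` being reformatted here is normally used together with `DB::write` to stage several operations and apply them in one atomic write. A small sketch (path name is hypothetical):

```rust
extern crate rocksdb;

use rocksdb::{Options, WriteBatch, DB};

fn main() {
    let mut opts = Options::default();
    opts.create_if_missing(true);
    let db = DB::open(&opts, "_rust_rocksdb_writebatch_example").unwrap();

    // Stage several writes, then apply them all with a single call to write().
    let mut batch = WriteBatch::default();
    batch.put(b"k1", b"v1").unwrap();
    batch.put(b"k2", b"v2").unwrap();
    batch.delete(b"k1").unwrap();
    db.write(batch).unwrap();

    assert!(db.get(b"k1").unwrap().is_none());
    assert!(db.get(b"k2").unwrap().is_some());
}
```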
@ -1311,10 +1344,11 @@ impl WriteBatch {
} }
} }
pub fn put_cf<K, V>(&mut self, cf: ColumnFamily, key: K, value: V) -> Result<(), Error> pub fn put_cf<K, V>(&mut self, cf: ColumnFamily, key: K, value: V) -> Result<(), Error>
where K: AsRef<[u8]>, where
V: AsRef<[u8]> { K: AsRef<[u8]>,
V: AsRef<[u8]>,
{
let key = key.as_ref(); let key = key.as_ref();
let value = value.as_ref(); let value = value.as_ref();
@ -1331,10 +1365,11 @@ impl WriteBatch {
} }
} }
pub fn merge<K, V>(&mut self, key: K, value: V) -> Result<(), Error> pub fn merge<K, V>(&mut self, key: K, value: V) -> Result<(), Error>
where K: AsRef<[u8]>, where
V: AsRef<[u8]> { K: AsRef<[u8]>,
V: AsRef<[u8]>,
{
let key = key.as_ref(); let key = key.as_ref();
let value = value.as_ref(); let value = value.as_ref();
@ -1350,10 +1385,11 @@ impl WriteBatch {
} }
} }
pub fn merge_cf<K, V>(&mut self, cf: ColumnFamily, key: K, value: V) -> Result<(), Error> pub fn merge_cf<K, V>(&mut self, cf: ColumnFamily, key: K, value: V) -> Result<(), Error>
where K: AsRef<[u8]>, where
V: AsRef<[u8]> { K: AsRef<[u8]>,
V: AsRef<[u8]>,
{
let key = key.as_ref(); let key = key.as_ref();
let value = value.as_ref(); let value = value.as_ref();
@ -1467,7 +1503,7 @@ impl ReadOptions {
pub fn set_iterate_upper_bound<K: AsRef<[u8]>>(&mut self, key: K) { pub fn set_iterate_upper_bound<K: AsRef<[u8]>>(&mut self, key: K) {
let key = key.as_ref(); let key = key.as_ref();
unsafe { unsafe {
ffi::rocksdb_readoptions_set_iterate_upper_bound( ffi::rocksdb_readoptions_set_iterate_upper_bound(
self.inner, self.inner,

@ -71,8 +71,8 @@ mod slice_transform;
pub use compaction_filter::Decision as CompactionDecision; pub use compaction_filter::Decision as CompactionDecision;
pub use db::{ pub use db::{
DBCompactionStyle, DBCompressionType, DBIterator, DBRawIterator, DBRecoveryMode, DBCompactionStyle, DBCompressionType, DBIterator, DBRawIterator, DBRecoveryMode, DBVector,
DBVector, Direction, IteratorMode, ReadOptions, Snapshot, WriteBatch, Direction, IteratorMode, ReadOptions, Snapshot, WriteBatch,
}; };
pub use slice_transform::SliceTransform; pub use slice_transform::SliceTransform;

@ -16,7 +16,7 @@ extern crate rocksdb;
mod util; mod util;
use rocksdb::{CompactionDecision, DB, Options}; use rocksdb::{CompactionDecision, Options, DB};
use util::DBPath; use util::DBPath;
#[cfg(test)] #[cfg(test)]

@ -12,14 +12,14 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
extern crate rocksdb;
extern crate libc; extern crate libc;
extern crate rocksdb;
mod util; mod util;
use libc::{size_t}; use libc::size_t;
use rocksdb::{DB, DBVector, Error, IteratorMode, Options, WriteBatch}; use rocksdb::{DBVector, Error, IteratorMode, Options, WriteBatch, DB};
use util::DBPath; use util::DBPath;
#[test] #[test]
@ -38,7 +38,7 @@ fn external() {
{ {
let db = DB::open_default(&path).unwrap(); let db = DB::open_default(&path).unwrap();
assert!(db.put(b"k1", b"v1111").is_ok()); assert!(db.put(b"k1", b"v1111").is_ok());
let r: Result<Option<DBVector>, Error> = db.get(b"k1"); let r: Result<Option<DBVector>, Error> = db.get(b"k1");
@ -130,14 +130,14 @@ fn snapshot_test() {
let path = DBPath::new("_rust_rocksdb_snapshottest"); let path = DBPath::new("_rust_rocksdb_snapshottest");
{ {
let db = DB::open_default(&path).unwrap(); let db = DB::open_default(&path).unwrap();
assert!(db.put(b"k1", b"v1111").is_ok()); assert!(db.put(b"k1", b"v1111").is_ok());
let snap = db.snapshot(); let snap = db.snapshot();
assert!(snap.get(b"k1").unwrap().unwrap().to_utf8().unwrap() == "v1111"); assert!(snap.get(b"k1").unwrap().unwrap().to_utf8().unwrap() == "v1111");
assert!(db.put(b"k2", b"v2222").is_ok()); assert!(db.put(b"k2", b"v2222").is_ok());
assert!(db.get(b"k2").unwrap().is_some()); assert!(db.get(b"k2").unwrap().is_some());
assert!(snap.get(b"k2").unwrap().is_none()); assert!(snap.get(b"k2").unwrap().is_none());
} }

@ -214,20 +214,20 @@ fn test_prefix_iterator_uses_full_prefix() {
// Explanation: `db.prefix_iterator` sets the underlying // Explanation: `db.prefix_iterator` sets the underlying
// options to seek to the first key that matches the *entire* // options to seek to the first key that matches the *entire*
// `prefix`. From there, the iterator will continue to read pairs // `prefix`. From there, the iterator will continue to read pairs
// as long as the prefix extracted from `key` matches the // as long as the prefix extracted from `key` matches the
// prefix extracted from `prefix`. // prefix extracted from `prefix`.
let path = DBPath::new("_rust_rocksdb_prefixiteratorusesfullprefixtest"); let path = DBPath::new("_rust_rocksdb_prefixiteratorusesfullprefixtest");
{ {
let data = [ let data = [
([0,0,0,0], b"111"), ([0, 0, 0, 0], b"111"),
([0,0,0,1], b"222"), ([0, 0, 0, 1], b"222"),
([0,1,0,1], b"333"), ([0, 1, 0, 1], b"333"),
([0,1,1,1], b"444"), ([0, 1, 1, 1], b"444"),
([0,1,2,1], b"555"), ([0, 1, 2, 1], b"555"),
([0,2,0,0], b"666"), ([0, 2, 0, 0], b"666"),
([2,0,0,0], b"777"), ([2, 0, 0, 0], b"777"),
([2,2,2,2], b"888") ([2, 2, 2, 2], b"888"),
]; ];
let prefix_extractor = rocksdb::SliceTransform::create_fixed_prefix(1); let prefix_extractor = rocksdb::SliceTransform::create_fixed_prefix(1);
@ -242,9 +242,10 @@ fn test_prefix_iterator_uses_full_prefix() {
assert!(db.put(key, *value).is_ok()); assert!(db.put(key, *value).is_ok());
} }
let prefix = [0,1,1]; let prefix = [0, 1, 1];
let results: Vec<_> = db.prefix_iterator(&prefix) let results: Vec<_> = db
.map(|(_,v)| std::str::from_utf8(&v).unwrap().to_string()) .prefix_iterator(&prefix)
.map(|(_, v)| std::str::from_utf8(&v).unwrap().to_string())
.collect(); .collect();
assert_eq!(results, vec!("444", "555", "666")); assert_eq!(results, vec!("444", "555", "666"));
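The comment at the top of this test describes the seek-to-full-prefix behaviour; a condensed standalone sketch of the same idea (hypothetical path name, same one-byte fixed-prefix extractor as the test):

```rust
extern crate rocksdb;

use rocksdb::{Options, DB};

fn main() {
    let mut opts = Options::default();
    opts.create_if_missing(true);
    // As in the test: the prefix of a key is its first byte.
    opts.set_prefix_extractor(rocksdb::SliceTransform::create_fixed_prefix(1));

    let db = DB::open(&opts, "_rust_rocksdb_prefix_example").unwrap();
    db.put([0, 1, 1, 1], b"444").unwrap();
    db.put([0, 2, 0, 0], b"666").unwrap();
    db.put([2, 0, 0, 0], b"777").unwrap();

    // prefix_iterator seeks to the first key >= [0, 1, 1], then keeps yielding
    // pairs while the extracted prefix (the first byte, 0) still matches; the
    // key starting with 2 is never reached.
    let values: Vec<_> = db
        .prefix_iterator([0, 1, 1])
        .map(|(_, v)| std::str::from_utf8(&v).unwrap().to_string())
        .collect();
    assert_eq!(values, vec!["444", "666"]);
}
```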
