From 02d5b9361297f7e9d7b25a15fee845e0ad2501d3 Mon Sep 17 00:00:00 2001 From: Jordan Terrell Date: Mon, 4 Feb 2019 07:12:27 -0600 Subject: [PATCH 1/3] Adding format checks to CI build... --- .travis.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.travis.yml b/.travis.yml index c691293..2c92ee3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -22,7 +22,12 @@ addons: - libclang-3.9-dev - clang-3.9 +install: + - rustup component add rustfmt + - rustfmt -V + script: + - cargo fmt --all -- --check - cargo test --manifest-path=librocksdb-sys/Cargo.toml - cargo test From 896dbc6c617db88a987fd047640c97ac7458dd51 Mon Sep 17 00:00:00 2001 From: Jordan Terrell Date: Mon, 4 Feb 2019 09:07:55 -0600 Subject: [PATCH 2/3] Applying changes from rustfmt... --- librocksdb-sys/build.rs | 3 +- src/backup.rs | 2 +- src/compaction_filter.rs | 3 +- src/db.rs | 210 ++++++++++++++++++++-------------- src/lib.rs | 4 +- tests/test_compationfilter.rs | 2 +- tests/test_db.rs | 12 +- tests/test_iterator.rs | 25 ++-- 8 files changed, 152 insertions(+), 109 deletions(-) diff --git a/librocksdb-sys/build.rs b/librocksdb-sys/build.rs index acccadd..503fecf 100644 --- a/librocksdb-sys/build.rs +++ b/librocksdb-sys/build.rs @@ -119,7 +119,8 @@ fn build_rocksdb() { .filter(|file| match *file { "port/port_posix.cc" | "env/env_posix.cc" | "env/io_posix.cc" => false, _ => true, - }).collect::>(); + }) + .collect::>(); // Add Windows-specific sources lib_sources.push("port/win/port_win.cc"); diff --git a/src/backup.rs b/src/backup.rs index 3ede767..56901a0 100644 --- a/src/backup.rs +++ b/src/backup.rs @@ -46,7 +46,7 @@ impl BackupEngine { "Failed to convert path to CString \ when opening backup engine" .to_owned(), - )) + )); } }; diff --git a/src/compaction_filter.rs b/src/compaction_filter.rs index ee40b95..cedded9 100644 --- a/src/compaction_filter.rs +++ b/src/compaction_filter.rs @@ -45,7 +45,8 @@ impl CompactionFilterFn for F where F: FnMut(u32, &[u8], &[u8]) -> Decision, F: Send + 'static, -{} +{ +} pub struct CompactionFilterCallback where diff --git a/src/db.rs b/src/db.rs index 274e483..1ecb6c4 100644 --- a/src/db.rs +++ b/src/db.rs @@ -154,7 +154,7 @@ pub struct Snapshot<'a> { /// ``` pub struct DBRawIterator<'a> { inner: *mut ffi::rocksdb_iterator_t, - db: PhantomData<&'a DB> + db: PhantomData<&'a DB>, } /// An iterator over a database or column family, with specifiable @@ -214,7 +214,7 @@ impl<'a> DBRawIterator<'a> { unsafe { DBRawIterator { inner: ffi::rocksdb_create_iterator(db.inner, readopts.inner), - db: PhantomData + db: PhantomData, } } } @@ -227,7 +227,7 @@ impl<'a> DBRawIterator<'a> { unsafe { Ok(DBRawIterator { inner: ffi::rocksdb_create_iterator_cf(db.inner, readopts.inner, cf_handle.inner), - db: PhantomData + db: PhantomData, }) } } @@ -603,7 +603,11 @@ impl<'a> Snapshot<'a> { DBRawIterator::new(self.db, &readopts) } - pub fn raw_iterator_cf_opt(&self, cf_handle: ColumnFamily, mut readopts: ReadOptions) -> Result { + pub fn raw_iterator_cf_opt( + &self, + cf_handle: ColumnFamily, + mut readopts: ReadOptions, + ) -> Result { readopts.set_snapshot(self); DBRawIterator::new_cf(self.db, cf_handle, &readopts) } @@ -613,17 +617,30 @@ impl<'a> Snapshot<'a> { self.get_opt(key, readopts) } - pub fn get_cf>(&self, cf: ColumnFamily, key: K) -> Result, Error> { + pub fn get_cf>( + &self, + cf: ColumnFamily, + key: K, + ) -> Result, Error> { let readopts = ReadOptions::default(); self.get_cf_opt(cf, key.as_ref(), readopts) } - pub fn get_opt>(&self, key: K, mut readopts: ReadOptions) -> Result, Error> { + pub 
fn get_opt>( + &self, + key: K, + mut readopts: ReadOptions, + ) -> Result, Error> { readopts.set_snapshot(self); self.db.get_opt(key.as_ref(), &readopts) } - pub fn get_cf_opt>(&self, cf: ColumnFamily, key: K, mut readopts: ReadOptions) -> Result, Error> { + pub fn get_cf_opt>( + &self, + cf: ColumnFamily, + key: K, + mut readopts: ReadOptions, + ) -> Result, Error> { readopts.set_snapshot(self); self.db.get_cf_opt(cf, key.as_ref(), &readopts) } @@ -690,13 +707,14 @@ impl DB { "Failed to convert path to CString \ when opening DB." .to_owned(), - )) + )); } }; if let Err(e) = fs::create_dir_all(&path) { return Err(Error::new(format!( - "Failed to create RocksDB directory: `{:?}`.", e + "Failed to create RocksDB directory: `{:?}`.", + e ))); } @@ -755,7 +773,8 @@ impl DB { } for (n, h) in cfs_v.iter().zip(cfhandles) { - cf_map.write() + cf_map + .write() .map_err(|e| Error::new(e.to_string()))? .insert(n.name.clone(), h); } @@ -829,7 +848,11 @@ impl DB { self.write_opt(batch, &wo) } - pub fn get_opt>(&self, key: K, readopts: &ReadOptions) -> Result, Error> { + pub fn get_opt>( + &self, + key: K, + readopts: &ReadOptions, + ) -> Result, Error> { if readopts.inner.is_null() { return Err(Error::new( "Unable to create RocksDB read options. \ @@ -902,7 +925,11 @@ impl DB { } } - pub fn get_cf>(&self, cf: ColumnFamily, key: K) -> Result, Error> { + pub fn get_cf>( + &self, + cf: ColumnFamily, + key: K, + ) -> Result, Error> { self.get_cf_opt(cf, key.as_ref(), &ReadOptions::default()) } @@ -914,7 +941,7 @@ impl DB { "Failed to convert path to CString \ when opening rocksdb" .to_owned(), - )) + )); } }; let cf = unsafe { @@ -924,10 +951,12 @@ impl DB { cname.as_ptr(), )); - self.cfs.write().map_err(|e| Error::new(e.to_string()))? + self.cfs + .write() + .map_err(|e| Error::new(e.to_string()))? .insert(name.to_string(), cf_handle); - - ColumnFamily { + + ColumnFamily { inner: cf_handle, db: PhantomData, } @@ -936,29 +965,29 @@ impl DB { } pub fn drop_cf(&self, name: &str) -> Result<(), Error> { - if let Some(cf) = self.cfs.write().map_err(|e| Error::new(e.to_string()))? - .remove(name) { + if let Some(cf) = self + .cfs + .write() + .map_err(|e| Error::new(e.to_string()))? + .remove(name) + { unsafe { ffi_try!(ffi::rocksdb_drop_column_family(self.inner, cf,)); } Ok(()) } else { Err(Error::new( - format!("Invalid column family: {}", name).to_owned() + format!("Invalid column family: {}", name).to_owned(), )) } } /// Return the underlying column family handle. pub fn cf_handle(&self, name: &str) -> Option { - self.cfs - .read() - .ok()? 
- .get(name) - .map(|h| ColumnFamily { - inner: *h, - db: PhantomData - }) + self.cfs.read().ok()?.get(name).map(|h| ColumnFamily { + inner: *h, + db: PhantomData, + }) } pub fn iterator(&self, mode: IteratorMode) -> DBIterator { @@ -982,7 +1011,11 @@ impl DB { pub fn prefix_iterator>(&self, prefix: P) -> DBIterator { let mut opts = ReadOptions::default(); opts.set_prefix_same_as_start(true); - DBIterator::new(self, &opts, IteratorMode::From(prefix.as_ref(), Direction::Forward)) + DBIterator::new( + self, + &opts, + IteratorMode::From(prefix.as_ref(), Direction::Forward), + ) } pub fn iterator_cf( @@ -1007,7 +1040,7 @@ impl DB { pub fn prefix_iterator_cf>( &self, cf_handle: ColumnFamily, - prefix: P + prefix: P, ) -> Result { let mut opts = ReadOptions::default(); opts.set_prefix_same_as_start(true); @@ -1034,14 +1067,14 @@ impl DB { } pub fn put_opt(&self, key: K, value: V, writeopts: &WriteOptions) -> Result<(), Error> - where K: AsRef<[u8]>, - V: AsRef<[u8]> { - + where + K: AsRef<[u8]>, + V: AsRef<[u8]>, + { let key = key.as_ref(); let value = value.as_ref(); unsafe { - ffi_try!(ffi::rocksdb_put( self.inner, writeopts.inner, @@ -1060,15 +1093,15 @@ impl DB { key: K, value: V, writeopts: &WriteOptions, - ) -> Result<(), Error> - where K: AsRef<[u8]>, - V: AsRef<[u8]> { - + ) -> Result<(), Error> + where + K: AsRef<[u8]>, + V: AsRef<[u8]>, + { let key = key.as_ref(); let value = value.as_ref(); unsafe { - ffi_try!(ffi::rocksdb_put_cf( self.inner, writeopts.inner, @@ -1082,15 +1115,11 @@ impl DB { } } - pub fn merge_opt( - &self, - key: K, - value: V, - writeopts: &WriteOptions, - ) -> Result<(), Error> - where K: AsRef<[u8]>, - V: AsRef<[u8]> { - + pub fn merge_opt(&self, key: K, value: V, writeopts: &WriteOptions) -> Result<(), Error> + where + K: AsRef<[u8]>, + V: AsRef<[u8]>, + { let key = key.as_ref(); let value = value.as_ref(); @@ -1114,14 +1143,14 @@ impl DB { value: V, writeopts: &WriteOptions, ) -> Result<(), Error> - where K: AsRef<[u8]>, - V: AsRef<[u8]> { - + where + K: AsRef<[u8]>, + V: AsRef<[u8]>, + { let key = key.as_ref(); let value = value.as_ref(); unsafe { - ffi_try!(ffi::rocksdb_merge_cf( self.inner, writeopts.inner, @@ -1135,9 +1164,13 @@ impl DB { } } - pub fn delete_opt>(&self, key: K, writeopts: &WriteOptions) -> Result<(), Error> { + pub fn delete_opt>( + &self, + key: K, + writeopts: &WriteOptions, + ) -> Result<(), Error> { let key = key.as_ref(); - + unsafe { ffi_try!(ffi::rocksdb_delete( self.inner, @@ -1155,7 +1188,6 @@ impl DB { key: K, writeopts: &WriteOptions, ) -> Result<(), Error> { - let key = key.as_ref(); unsafe { @@ -1171,30 +1203,34 @@ impl DB { } pub fn put(&self, key: K, value: V) -> Result<(), Error> - where K: AsRef<[u8]>, - V: AsRef<[u8]> { - + where + K: AsRef<[u8]>, + V: AsRef<[u8]>, + { self.put_opt(key.as_ref(), value.as_ref(), &WriteOptions::default()) } - pub fn put_cf(&self, cf: ColumnFamily, key: K, value: V) -> Result<(), Error> - where K: AsRef<[u8]>, - V: AsRef<[u8]> { - + pub fn put_cf(&self, cf: ColumnFamily, key: K, value: V) -> Result<(), Error> + where + K: AsRef<[u8]>, + V: AsRef<[u8]>, + { self.put_cf_opt(cf, key.as_ref(), value.as_ref(), &WriteOptions::default()) } - pub fn merge(&self, key: K, value: V) -> Result<(), Error> - where K: AsRef<[u8]>, - V: AsRef<[u8]> { - + pub fn merge(&self, key: K, value: V) -> Result<(), Error> + where + K: AsRef<[u8]>, + V: AsRef<[u8]>, + { self.merge_opt(key.as_ref(), value.as_ref(), &WriteOptions::default()) } - pub fn merge_cf(&self, cf: ColumnFamily, key: K, value: V) -> Result<(), 
Error> - where K: AsRef<[u8]>, - V: AsRef<[u8]> { - + pub fn merge_cf(&self, cf: ColumnFamily, key: K, value: V) -> Result<(), Error> + where + K: AsRef<[u8]>, + V: AsRef<[u8]>, + { self.merge_cf_opt(cf, key.as_ref(), value.as_ref(), &WriteOptions::default()) } @@ -1284,9 +1320,10 @@ impl WriteBatch { /// Insert a value into the database under the given key. pub fn put(&mut self, key: K, value: V) -> Result<(), Error> - where K: AsRef<[u8]>, - V: AsRef<[u8]> { - + where + K: AsRef<[u8]>, + V: AsRef<[u8]>, + { let key = key.as_ref(); let value = value.as_ref(); @@ -1302,10 +1339,11 @@ impl WriteBatch { } } - pub fn put_cf(&mut self, cf: ColumnFamily, key: K, value: V) -> Result<(), Error> - where K: AsRef<[u8]>, - V: AsRef<[u8]> { - + pub fn put_cf(&mut self, cf: ColumnFamily, key: K, value: V) -> Result<(), Error> + where + K: AsRef<[u8]>, + V: AsRef<[u8]>, + { let key = key.as_ref(); let value = value.as_ref(); @@ -1322,10 +1360,11 @@ impl WriteBatch { } } - pub fn merge(&mut self, key: K, value: V) -> Result<(), Error> - where K: AsRef<[u8]>, - V: AsRef<[u8]> { - + pub fn merge(&mut self, key: K, value: V) -> Result<(), Error> + where + K: AsRef<[u8]>, + V: AsRef<[u8]>, + { let key = key.as_ref(); let value = value.as_ref(); @@ -1341,10 +1380,11 @@ impl WriteBatch { } } - pub fn merge_cf(&mut self, cf: ColumnFamily, key: K, value: V) -> Result<(), Error> - where K: AsRef<[u8]>, - V: AsRef<[u8]> { - + pub fn merge_cf(&mut self, cf: ColumnFamily, key: K, value: V) -> Result<(), Error> + where + K: AsRef<[u8]>, + V: AsRef<[u8]>, + { let key = key.as_ref(); let value = value.as_ref(); @@ -1458,7 +1498,7 @@ impl ReadOptions { pub fn set_iterate_upper_bound>(&mut self, key: K) { let key = key.as_ref(); - + unsafe { ffi::rocksdb_readoptions_set_iterate_upper_bound( self.inner, diff --git a/src/lib.rs b/src/lib.rs index 56ce4c7..10390b4 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -71,8 +71,8 @@ mod slice_transform; pub use compaction_filter::Decision as CompactionDecision; pub use db::{ - DBCompactionStyle, DBCompressionType, DBIterator, DBRawIterator, DBRecoveryMode, - DBVector, Direction, IteratorMode, ReadOptions, Snapshot, WriteBatch, + DBCompactionStyle, DBCompressionType, DBIterator, DBRawIterator, DBRecoveryMode, DBVector, + Direction, IteratorMode, ReadOptions, Snapshot, WriteBatch, }; pub use slice_transform::SliceTransform; diff --git a/tests/test_compationfilter.rs b/tests/test_compationfilter.rs index 26529be..8e7176c 100644 --- a/tests/test_compationfilter.rs +++ b/tests/test_compationfilter.rs @@ -16,7 +16,7 @@ extern crate rocksdb; mod util; -use rocksdb::{CompactionDecision, DB, Options}; +use rocksdb::{CompactionDecision, Options, DB}; use util::DBPath; #[cfg(test)] diff --git a/tests/test_db.rs b/tests/test_db.rs index ecb0bb5..71e584f 100644 --- a/tests/test_db.rs +++ b/tests/test_db.rs @@ -12,14 +12,14 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-extern crate rocksdb; extern crate libc; +extern crate rocksdb; mod util; -use libc::{size_t}; +use libc::size_t; -use rocksdb::{DB, DBVector, Error, IteratorMode, Options, WriteBatch}; +use rocksdb::{DBVector, Error, IteratorMode, Options, WriteBatch, DB}; use util::DBPath; #[test] @@ -38,7 +38,7 @@ fn external() { { let db = DB::open_default(&path).unwrap(); - + assert!(db.put(b"k1", b"v1111").is_ok()); let r: Result, Error> = db.get(b"k1"); @@ -130,14 +130,14 @@ fn snapshot_test() { let path = DBPath::new("_rust_rocksdb_snapshottest"); { let db = DB::open_default(&path).unwrap(); - + assert!(db.put(b"k1", b"v1111").is_ok()); let snap = db.snapshot(); assert!(snap.get(b"k1").unwrap().unwrap().to_utf8().unwrap() == "v1111"); assert!(db.put(b"k2", b"v2222").is_ok()); - + assert!(db.get(b"k2").unwrap().is_some()); assert!(snap.get(b"k2").unwrap().is_none()); } diff --git a/tests/test_iterator.rs b/tests/test_iterator.rs index 578b398..88b5482 100644 --- a/tests/test_iterator.rs +++ b/tests/test_iterator.rs @@ -214,20 +214,20 @@ fn test_prefix_iterator_uses_full_prefix() { // Explanation: `db.prefix_iterator` sets the underlying // options to seek to the first key that matches the *entire* // `prefix`. From there, the iterator will continue to read pairs - // as long as the prefix extracted from `key` matches the + // as long as the prefix extracted from `key` matches the // prefix extracted from `prefix`. let path = DBPath::new("_rust_rocksdb_prefixiteratorusesfullprefixtest"); { let data = [ - ([0,0,0,0], b"111"), - ([0,0,0,1], b"222"), - ([0,1,0,1], b"333"), - ([0,1,1,1], b"444"), - ([0,1,2,1], b"555"), - ([0,2,0,0], b"666"), - ([2,0,0,0], b"777"), - ([2,2,2,2], b"888") + ([0, 0, 0, 0], b"111"), + ([0, 0, 0, 1], b"222"), + ([0, 1, 0, 1], b"333"), + ([0, 1, 1, 1], b"444"), + ([0, 1, 2, 1], b"555"), + ([0, 2, 0, 0], b"666"), + ([2, 0, 0, 0], b"777"), + ([2, 2, 2, 2], b"888"), ]; let prefix_extractor = rocksdb::SliceTransform::create_fixed_prefix(1); @@ -242,9 +242,10 @@ fn test_prefix_iterator_uses_full_prefix() { assert!(db.put(key, *value).is_ok()); } - let prefix = [0,1,1]; - let results: Vec<_> = db.prefix_iterator(&prefix) - .map(|(_,v)| std::str::from_utf8(&v).unwrap().to_string()) + let prefix = [0, 1, 1]; + let results: Vec<_> = db + .prefix_iterator(&prefix) + .map(|(_, v)| std::str::from_utf8(&v).unwrap().to_string()) .collect(); assert_eq!(results, vec!("444", "555", "666")); From 59d54070b945d1ba31bd300c632815a1bed8b072 Mon Sep 17 00:00:00 2001 From: Jordan Terrell Date: Tue, 5 Feb 2019 07:23:18 -0600 Subject: [PATCH 3/3] Formatting recent updates to master branch [skip ci] --- src/db.rs | 28 ++++++++++++---------------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/src/db.rs b/src/db.rs index dd2a061..8cdf53a 100644 --- a/src/db.rs +++ b/src/db.rs @@ -683,11 +683,12 @@ impl DB { /// Open a database with the given database options and column family names. /// /// Column families opened using this function will be created with default `Options`. - pub fn open_cf(opts: &Options, path: P, cfs: I) -> Result - where P: AsRef, - I: IntoIterator, - N: AsRef { - + pub fn open_cf(opts: &Options, path: P, cfs: I) -> Result + where + P: AsRef, + I: IntoIterator, + N: AsRef, + { let cfs = cfs .into_iter() .map(|name| ColumnFamilyDescriptor::new(name.as_ref(), Options::default())); @@ -696,17 +697,12 @@ impl DB { } /// Open a database with the given database options and column family descriptors. 
-    pub fn open_cf_descriptors<P, I>(
-        opts: &Options,
-        path: P,
-        cfs: I,
-    ) -> Result<DB, Error>
-        where P: AsRef<Path>,
-              I: IntoIterator<Item = ColumnFamilyDescriptor> {
-
-        let cfs: Vec<_> = cfs
-            .into_iter()
-            .collect();
+    pub fn open_cf_descriptors<P, I>(opts: &Options, path: P, cfs: I) -> Result<DB, Error>
+    where
+        P: AsRef<Path>,
+        I: IntoIterator<Item = ColumnFamilyDescriptor>,
+    {
+        let cfs: Vec<_> = cfs.into_iter().collect();
 
         let path = path.as_ref();
         let cpath = match CString::new(path.to_string_lossy().as_bytes()) {
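
Not part of the patch: a minimal usage sketch of the `DB::open_cf_descriptors` API touched in this last hunk. The path "example_db", the column-family name "cf1", and the option values are made up for illustration; only the calls themselves are taken from the crate's public API as of this series.

    // Sketch only (not from the patch): open a DB with one column family via
    // open_cf_descriptors, then write through the returned handle.
    extern crate rocksdb;

    use rocksdb::{ColumnFamilyDescriptor, Options, DB};

    fn main() -> Result<(), rocksdb::Error> {
        // Per-column-family options; the write-buffer value is arbitrary.
        let mut cf_opts = Options::default();
        cf_opts.set_max_write_buffer_number(16);
        let cf = ColumnFamilyDescriptor::new("cf1", cf_opts);

        // DB-wide options: create the DB and any missing column families.
        let mut db_opts = Options::default();
        db_opts.create_if_missing(true);
        db_opts.create_missing_column_families(true);

        // open_cf_descriptors accepts any IntoIterator of ColumnFamilyDescriptor.
        let db = DB::open_cf_descriptors(&db_opts, "example_db", vec![cf])?;
        let cf1 = db.cf_handle("cf1").expect("cf1 was just created");
        db.put_cf(cf1, b"k1", b"v1")?;
        Ok(())
    }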