Merge pull request #229 from iSynaptic/master

Fixing rustfmt.toml and applying formatting...
Jordan Terrell 6 years ago committed by GitHub
commit 0d61f9bfcd
20 changed files (change count per file):

 1. librocksdb-sys/build.rs (25)
 2. librocksdb-sys/tests/ffi.rs (672)
 3. rustfmt.toml (4)
 4. src/backup.rs (8)
 5. src/checkpoint.rs (22)
 6. src/compaction_filter.rs (7)
 7. src/comparator.rs (1)
 8. src/db.rs (96)
 9. src/db_options.rs (63)
10. src/lib.rs (22)
11. src/merge_operator.rs (722)
12. src/slice_transform.rs (62)
13. tests/test_checkpoint.rs (2)
14. tests/test_column_family.rs (55)
15. tests/test_iterator.rs (28)
16. tests/test_multithreaded.rs (3)
17. tests/test_raw_iterator.rs (15)
18. tests/test_rocksdb_options.rs (2)
19. tests/test_slice_transform.rs (6)
20. tests/util/mod.rs (11)

@@ -1,5 +1,5 @@
-extern crate cc;
 extern crate bindgen;
+extern crate cc;
 extern crate glob;
 use std::env;
@@ -94,7 +94,6 @@ fn build_rocksdb() {
        config.define("OS_MACOSX", Some("1"));
        config.define("ROCKSDB_PLATFORM_POSIX", Some("1"));
        config.define("ROCKSDB_LIB_IO_POSIX", Some("1"));
    }
    if cfg!(target_os = "linux") {
        config.define("OS_LINUX", Some("1"));
@@ -118,12 +117,9 @@ fn build_rocksdb() {
        .iter()
        .cloned()
        .filter(|file| match *file {
-            "port/port_posix.cc" |
-            "env/env_posix.cc" |
-            "env/io_posix.cc" => false,
+            "port/port_posix.cc" | "env/env_posix.cc" | "env/io_posix.cc" => false,
            _ => true,
-        })
-        .collect::<Vec<&'static str>>();
+        }).collect::<Vec<&'static str>>();

    // Add Windows-specific sources
    lib_sources.push("port/win/port_win.cc");
@@ -185,12 +181,11 @@ fn build_lz4() {
    compiler.opt_level(3);

-    match env::var("TARGET").unwrap().as_str()
-    {
-        "i686-pc-windows-gnu" => {
-            compiler.flag("-fno-tree-vectorize");
-        },
-        _ => {}
+    match env::var("TARGET").unwrap().as_str() {
+        "i686-pc-windows-gnu" => {
+            compiler.flag("-fno-tree-vectorize");
+        }
+        _ => {}
    }

    compiler.compile("liblz4.a");
@@ -227,9 +222,7 @@ fn build_zstd() {
fn build_zlib() {
    let mut compiler = cc::Build::new();

-    let globs = &[
-        "zlib/*.c"
-    ];
+    let globs = &["zlib/*.c"];

    for pattern in globs {
        for path in glob::glob(pattern).unwrap() {

@@ -15,7 +15,13 @@
// This code is based on <https://github.com/facebook/rocksdb/blob/master/db/c_test.c>, revision a10e8a056d569acf6a52045124e6414ad33bdfcd.

-#![allow(non_snake_case, non_upper_case_globals, unused_mut, unused_unsafe, unused_variables)]
+#![allow(
+    non_snake_case,
+    non_upper_case_globals,
+    unused_mut,
+    unused_unsafe,
+    unused_variables
+)]

#[macro_use]
extern crate const_cstr;
@@ -23,18 +29,18 @@ extern crate libc;
extern crate librocksdb_sys as ffi;
extern crate uuid;

-use ::ffi::*;
-use ::libc::*;
-use ::std::borrow::Cow;
-use ::std::env;
-use ::std::ffi::{CStr, CString};
-use ::std::io::Write;
-use ::std::mem;
-use ::std::path::PathBuf;
-use ::std::ptr;
-use ::std::slice;
-use ::std::str;
-use ::uuid::Uuid;
+use ffi::*;
+use libc::*;
+use std::borrow::Cow;
+use std::env;
+use std::ffi::{CStr, CString};
+use std::io::Write;
+use std::mem;
+use std::path::PathBuf;
+use std::ptr;
+use std::slice;
+use std::str;
+use uuid::Uuid;

macro_rules! err_println {
    ($($arg:tt)*) => (writeln!(&mut ::std::io::stderr(), $($arg)*).expect("failed printing to stderr"));
@ -77,28 +83,37 @@ unsafe fn StartPhase(name: &'static str) {
} }
macro_rules! CheckNoError { macro_rules! CheckNoError {
($err:ident) => { unsafe { ($err:ident) => {
assert!($err.is_null(), "{}: {}", phase, rstr($err)); unsafe {
} }; assert!($err.is_null(), "{}: {}", phase, rstr($err));
}
};
} }
macro_rules! CheckCondition { macro_rules! CheckCondition {
($cond:expr) => { unsafe { ($cond:expr) => {
assert!($cond, "{}: {}", phase, stringify!($cond)); unsafe {
} }; assert!($cond, "{}: {}", phase, stringify!($cond));
}
};
} }
unsafe fn CheckEqual(expected: *const c_char, v: *const c_char, n: size_t) { unsafe fn CheckEqual(expected: *const c_char, v: *const c_char, n: size_t) {
if expected.is_null() && v.is_null() { if expected.is_null() && v.is_null() {
// ok // ok
} else if !expected.is_null() && !v.is_null() && n == strlen(expected) && } else if !expected.is_null()
memcmp(expected as *const c_void, v as *const c_void, n) == 0 { && !v.is_null()
&& n == strlen(expected)
&& memcmp(expected as *const c_void, v as *const c_void, n) == 0
{
// ok // ok
} else { } else {
panic!("{}: expected '{}', got '{}'", panic!(
phase, "{}: expected '{}', got '{}'",
rstr(strndup(expected, n)), phase,
rstr(strndup(v, 5))); rstr(strndup(expected, n)),
rstr(strndup(v, 5))
);
} }
} }
@ -109,10 +124,12 @@ unsafe fn Free<T>(ptr: *mut *mut T) {
} }
} }
unsafe fn CheckGet(mut db: *mut rocksdb_t, unsafe fn CheckGet(
options: *mut rocksdb_readoptions_t, mut db: *mut rocksdb_t,
key: *const c_char, options: *mut rocksdb_readoptions_t,
expected: *const c_char) { key: *const c_char,
expected: *const c_char,
) {
let mut err: *mut c_char = ptr::null_mut(); let mut err: *mut c_char = ptr::null_mut();
let mut val_len: size_t = 0; let mut val_len: size_t = 0;
let mut val: *mut c_char = rocksdb_get(db, options, key, strlen(key), &mut val_len, &mut err); let mut val: *mut c_char = rocksdb_get(db, options, key, strlen(key), &mut val_len, &mut err);
@ -121,20 +138,24 @@ unsafe fn CheckGet(mut db: *mut rocksdb_t,
Free(&mut val); Free(&mut val);
} }
unsafe fn CheckGetCF(db: *mut rocksdb_t, unsafe fn CheckGetCF(
options: *const rocksdb_readoptions_t, db: *mut rocksdb_t,
handle: *mut rocksdb_column_family_handle_t, options: *const rocksdb_readoptions_t,
key: *const c_char, handle: *mut rocksdb_column_family_handle_t,
expected: *const c_char) { key: *const c_char,
expected: *const c_char,
) {
let mut err: *mut c_char = ptr::null_mut(); let mut err: *mut c_char = ptr::null_mut();
let mut val_len: size_t = 0; let mut val_len: size_t = 0;
let mut val: *mut c_char = rocksdb_get_cf(db, let mut val: *mut c_char = rocksdb_get_cf(
options, db,
handle, options,
key, handle,
strlen(key), key,
&mut val_len, strlen(key),
&mut err); &mut val_len,
&mut err,
);
CheckNoError!(err); CheckNoError!(err);
CheckEqual(expected, val, val_len); CheckEqual(expected, val, val_len);
Free(&mut val); Free(&mut val);
@ -150,11 +171,13 @@ unsafe fn CheckIter(iter: *mut rocksdb_iterator_t, key: *const c_char, val: *con
} }
// Callback from rocksdb_writebatch_iterate() // Callback from rocksdb_writebatch_iterate()
unsafe extern "C" fn CheckPut(ptr: *mut c_void, unsafe extern "C" fn CheckPut(
k: *const c_char, ptr: *mut c_void,
klen: size_t, k: *const c_char,
v: *const c_char, klen: size_t,
vlen: size_t) { v: *const c_char,
vlen: size_t,
) {
let mut state: *mut c_int = ptr as *mut c_int; let mut state: *mut c_int = ptr as *mut c_int;
CheckCondition!(*state < 2); CheckCondition!(*state < 2);
match *state { match *state {
@ -181,17 +204,14 @@ unsafe extern "C" fn CheckDel(ptr: *mut c_void, k: *const c_char, klen: size_t)
unsafe extern "C" fn CmpDestroy(arg: *mut c_void) {} unsafe extern "C" fn CmpDestroy(arg: *mut c_void) {}
unsafe extern "C" fn CmpCompare(arg: *mut c_void, unsafe extern "C" fn CmpCompare(
a: *const c_char, arg: *mut c_void,
alen: size_t, a: *const c_char,
b: *const c_char, alen: size_t,
blen: size_t) b: *const c_char,
-> c_int { blen: size_t,
let n = if alen < blen { ) -> c_int {
alen let n = if alen < blen { alen } else { blen };
} else {
blen
};
let mut r = memcmp(a as *const c_void, b as *const c_void, n); let mut r = memcmp(a as *const c_void, b as *const c_void, n);
if r == 0 { if r == 0 {
if alen < blen { if alen < blen {
@ -217,28 +237,34 @@ unsafe extern "C" fn FilterName(arg: *mut c_void) -> *const c_char {
cstrp!("TestFilter") cstrp!("TestFilter")
} }
unsafe extern "C" fn FilterCreate(arg: *mut c_void, unsafe extern "C" fn FilterCreate(
key_array: *const *const c_char, arg: *mut c_void,
key_length_array: *const size_t, key_array: *const *const c_char,
num_keys: c_int, key_length_array: *const size_t,
filter_length: *mut size_t) num_keys: c_int,
-> *mut c_char { filter_length: *mut size_t,
) -> *mut c_char {
*filter_length = 4; *filter_length = 4;
let result = malloc(4); let result = malloc(4);
memcpy(result, cstrp!("fake") as *const c_void, 4); memcpy(result, cstrp!("fake") as *const c_void, 4);
result as *mut c_char result as *mut c_char
} }
unsafe extern "C" fn FilterKeyMatch(arg: *mut c_void, unsafe extern "C" fn FilterKeyMatch(
key: *const c_char, arg: *mut c_void,
length: size_t, key: *const c_char,
filter: *const c_char, length: size_t,
filter_length: size_t) filter: *const c_char,
-> c_uchar { filter_length: size_t,
) -> c_uchar {
CheckCondition!(filter_length == 4); CheckCondition!(filter_length == 4);
CheckCondition!(memcmp(filter as *const c_void, CheckCondition!(
cstrp!("fake") as *const c_void, memcmp(
filter_length) == 0); filter as *const c_void,
cstrp!("fake") as *const c_void,
filter_length
) == 0
);
fake_filter_result fake_filter_result
} }
@ -250,24 +276,31 @@ unsafe extern "C" fn CFilterName(arg: *mut c_void) -> *const c_char {
cstrp!("foo") cstrp!("foo")
} }
unsafe extern "C" fn CFilterFilter(arg: *mut c_void, unsafe extern "C" fn CFilterFilter(
level: c_int, arg: *mut c_void,
key: *const c_char, level: c_int,
key_length: size_t, key: *const c_char,
existing_value: *const c_char, key_length: size_t,
value_length: size_t, existing_value: *const c_char,
new_value: *mut *mut c_char, value_length: size_t,
new_value_length: *mut size_t, new_value: *mut *mut c_char,
value_changed: *mut u8) new_value_length: *mut size_t,
-> c_uchar { value_changed: *mut u8,
) -> c_uchar {
if key_length == 3 { if key_length == 3 {
if memcmp(mem::transmute(key), if memcmp(
mem::transmute(cstrp!("bar")), mem::transmute(key),
key_length) == 0 { mem::transmute(cstrp!("bar")),
key_length,
) == 0
{
return 1; return 1;
} else if memcmp(mem::transmute(key), } else if memcmp(
mem::transmute(cstrp!("baz")), mem::transmute(key),
key_length) == 0 { mem::transmute(cstrp!("baz")),
key_length,
) == 0
{
*value_changed = 1; *value_changed = 1;
*new_value = cstrp!("newbazvalue") as *mut c_char; *new_value = cstrp!("newbazvalue") as *mut c_char;
*new_value_length = 11; *new_value_length = 11;
@ -283,49 +316,59 @@ unsafe extern "C" fn CFilterFactoryName(arg: *mut c_void) -> *const c_char {
cstrp!("foo") cstrp!("foo")
} }
unsafe extern "C" fn CFilterCreate(arg: *mut c_void, unsafe extern "C" fn CFilterCreate(
context: *mut rocksdb_compactionfiltercontext_t) arg: *mut c_void,
-> *mut rocksdb_compactionfilter_t { context: *mut rocksdb_compactionfiltercontext_t,
rocksdb_compactionfilter_create(ptr::null_mut(), ) -> *mut rocksdb_compactionfilter_t {
Some(CFilterDestroy), rocksdb_compactionfilter_create(
Some(CFilterFilter), ptr::null_mut(),
Some(CFilterName)) Some(CFilterDestroy),
Some(CFilterFilter),
Some(CFilterName),
)
} }
unsafe fn CheckCompaction(dbname: *const c_char, unsafe fn CheckCompaction(
db: *mut rocksdb_t, dbname: *const c_char,
options: *const rocksdb_options_t, db: *mut rocksdb_t,
roptions: *mut rocksdb_readoptions_t, options: *const rocksdb_options_t,
woptions: *mut rocksdb_writeoptions_t) roptions: *mut rocksdb_readoptions_t,
-> *mut rocksdb_t { woptions: *mut rocksdb_writeoptions_t,
) -> *mut rocksdb_t {
let mut err: *mut c_char = ptr::null_mut(); let mut err: *mut c_char = ptr::null_mut();
let db = rocksdb_open(options, dbname, &mut err); let db = rocksdb_open(options, dbname, &mut err);
CheckNoError!(err); CheckNoError!(err);
rocksdb_put(db, rocksdb_put(
woptions, db,
cstrp!("foo"), woptions,
3, cstrp!("foo"),
cstrp!("foovalue"), 3,
8, cstrp!("foovalue"),
&mut err); 8,
&mut err,
);
CheckNoError!(err); CheckNoError!(err);
CheckGet(db, roptions, cstrp!("foo"), cstrp!("foovalue")); CheckGet(db, roptions, cstrp!("foo"), cstrp!("foovalue"));
rocksdb_put(db, rocksdb_put(
woptions, db,
cstrp!("bar"), woptions,
3, cstrp!("bar"),
cstrp!("barvalue"), 3,
8, cstrp!("barvalue"),
&mut err); 8,
&mut err,
);
CheckNoError!(err); CheckNoError!(err);
CheckGet(db, roptions, cstrp!("bar"), cstrp!("barvalue")); CheckGet(db, roptions, cstrp!("bar"), cstrp!("barvalue"));
rocksdb_put(db, rocksdb_put(
woptions, db,
cstrp!("baz"), woptions,
3, cstrp!("baz"),
cstrp!("bazvalue"), 3,
8, cstrp!("bazvalue"),
&mut err); 8,
&mut err,
);
CheckNoError!(err); CheckNoError!(err);
CheckGet(db, roptions, cstrp!("baz"), cstrp!("bazvalue")); CheckGet(db, roptions, cstrp!("baz"), cstrp!("bazvalue"));
@ -346,17 +389,18 @@ unsafe extern "C" fn MergeOperatorName(arg: *mut c_void) -> *const c_char {
cstrp!("foo") cstrp!("foo")
} }
unsafe extern "C" fn MergeOperatorFullMerge(arg: *mut c_void, unsafe extern "C" fn MergeOperatorFullMerge(
key: *const c_char, arg: *mut c_void,
key_length: size_t, key: *const c_char,
existing_value: *const c_char, key_length: size_t,
existing_value_length: size_t, existing_value: *const c_char,
operands_list: *const *const c_char, existing_value_length: size_t,
operands_list_length: *const size_t, operands_list: *const *const c_char,
num_operands: c_int, operands_list_length: *const size_t,
success: *mut u8, num_operands: c_int,
new_value_length: *mut size_t) success: *mut u8,
-> *mut c_char { new_value_length: *mut size_t,
) -> *mut c_char {
*new_value_length = 4; *new_value_length = 4;
*success = 1; *success = 1;
let result: *mut c_char = malloc(4) as *mut _; let result: *mut c_char = malloc(4) as *mut _;
@ -364,15 +408,16 @@ unsafe extern "C" fn MergeOperatorFullMerge(arg: *mut c_void,
result result
} }
unsafe extern "C" fn MergeOperatorPartialMerge(arg: *mut c_void, unsafe extern "C" fn MergeOperatorPartialMerge(
key: *const c_char, arg: *mut c_void,
key_length: size_t, key: *const c_char,
operands_list: *const *const c_char, key_length: size_t,
operands_list_length: *const size_t, operands_list: *const *const c_char,
num_operands: c_int, operands_list_length: *const size_t,
success: *mut u8, num_operands: c_int,
new_value_length: *mut size_t) success: *mut u8,
-> *mut c_char { new_value_length: *mut size_t,
) -> *mut c_char {
*new_value_length = 4; *new_value_length = 4;
*success = 1; *success = 1;
let result: *mut c_char = malloc(4) as *mut _; let result: *mut c_char = malloc(4) as *mut _;
@ -413,10 +458,12 @@ fn ffi() {
let dbbackupname = dbbackupname.as_ptr(); let dbbackupname = dbbackupname.as_ptr();
StartPhase("create_objects"); StartPhase("create_objects");
cmp = rocksdb_comparator_create(ptr::null_mut(), cmp = rocksdb_comparator_create(
Some(CmpDestroy), ptr::null_mut(),
Some(CmpCompare), Some(CmpDestroy),
Some(CmpName)); Some(CmpCompare),
Some(CmpName),
);
env = rocksdb_create_default_env(); env = rocksdb_create_default_env();
cache = rocksdb_cache_create_lru(100000); cache = rocksdb_cache_create_lru(100000);
@ -440,10 +487,12 @@ fn ffi() {
no_compression, no_compression,
no_compression, no_compression,
no_compression, no_compression,
]; ];
rocksdb_options_set_compression_per_level(options, rocksdb_options_set_compression_per_level(
mem::transmute(compression_levels.as_ptr()), options,
compression_levels.len() as size_t); mem::transmute(compression_levels.as_ptr()),
compression_levels.len() as size_t,
);
roptions = rocksdb_readoptions_create(); roptions = rocksdb_readoptions_create();
rocksdb_readoptions_set_verify_checksums(roptions, 1); rocksdb_readoptions_set_verify_checksums(roptions, 1);
@ -513,11 +562,13 @@ fn ffi() {
let restore_options = rocksdb_restore_options_create(); let restore_options = rocksdb_restore_options_create();
rocksdb_restore_options_set_keep_log_files(restore_options, 0); rocksdb_restore_options_set_keep_log_files(restore_options, 0);
rocksdb_backup_engine_restore_db_from_latest_backup(be, rocksdb_backup_engine_restore_db_from_latest_backup(
dbname, be,
dbname, dbname,
restore_options, dbname,
&mut err); restore_options,
&mut err,
);
CheckNoError!(err); CheckNoError!(err);
rocksdb_restore_options_destroy(restore_options); rocksdb_restore_options_destroy(restore_options);
@ -553,10 +604,12 @@ fn ffi() {
CheckGet(db, roptions, cstrp!("bar"), ptr::null()); CheckGet(db, roptions, cstrp!("bar"), ptr::null());
CheckGet(db, roptions, cstrp!("box"), cstrp!("c")); CheckGet(db, roptions, cstrp!("box"), cstrp!("c"));
let mut pos: c_int = 0; let mut pos: c_int = 0;
rocksdb_writebatch_iterate(wb, rocksdb_writebatch_iterate(
mem::transmute(&mut pos), wb,
Some(CheckPut), mem::transmute(&mut pos),
Some(CheckDel)); Some(CheckPut),
Some(CheckDel),
);
CheckCondition!(pos == 3); CheckCondition!(pos == 3);
rocksdb_writebatch_destroy(wb); rocksdb_writebatch_destroy(wb);
} }
@ -568,13 +621,15 @@ fn ffi() {
let k_sizes: [size_t; 2] = [1, 2]; let k_sizes: [size_t; 2] = [1, 2];
let v_list: [*const c_char; 3] = [cstrp!("x"), cstrp!("y"), cstrp!("z")]; let v_list: [*const c_char; 3] = [cstrp!("x"), cstrp!("y"), cstrp!("z")];
let v_sizes: [size_t; 3] = [1, 1, 1]; let v_sizes: [size_t; 3] = [1, 1, 1];
rocksdb_writebatch_putv(wb, rocksdb_writebatch_putv(
k_list.len() as c_int, wb,
k_list.as_ptr(), k_list.len() as c_int,
k_sizes.as_ptr(), k_list.as_ptr(),
v_list.len() as c_int, k_sizes.as_ptr(),
v_list.as_ptr(), v_list.len() as c_int,
v_sizes.as_ptr()); v_list.as_ptr(),
v_sizes.as_ptr(),
);
rocksdb_write(db, woptions, wb, &mut err); rocksdb_write(db, woptions, wb, &mut err);
CheckNoError!(err); CheckNoError!(err);
CheckGet(db, roptions, cstrp!("zap"), cstrp!("xyz")); CheckGet(db, roptions, cstrp!("zap"), cstrp!("xyz"));
@ -596,10 +651,13 @@ fn ffi() {
let mut wb2 = rocksdb_writebatch_create_from(rep as *const c_char, repsize1); let mut wb2 = rocksdb_writebatch_create_from(rep as *const c_char, repsize1);
CheckCondition!(rocksdb_writebatch_count(wb1) == rocksdb_writebatch_count(wb2)); CheckCondition!(rocksdb_writebatch_count(wb1) == rocksdb_writebatch_count(wb2));
let mut repsize2: size_t = 0; let mut repsize2: size_t = 0;
CheckCondition!(memcmp(rep, CheckCondition!(
rocksdb_writebatch_data(wb2, &mut repsize2) as *const c_void, memcmp(
repsize1) == rep,
0); rocksdb_writebatch_data(wb2, &mut repsize2) as *const c_void,
repsize1
) == 0
);
rocksdb_writebatch_destroy(wb1); rocksdb_writebatch_destroy(wb1);
rocksdb_writebatch_destroy(wb2); rocksdb_writebatch_destroy(wb2);
} }
@ -633,14 +691,16 @@ fn ffi() {
let mut vals: [*mut c_char; 3] = [ptr::null_mut(), ptr::null_mut(), ptr::null_mut()]; let mut vals: [*mut c_char; 3] = [ptr::null_mut(), ptr::null_mut(), ptr::null_mut()];
let mut vals_sizes: [size_t; 3] = [0, 0, 0]; let mut vals_sizes: [size_t; 3] = [0, 0, 0];
let mut errs: [*mut c_char; 3] = [ptr::null_mut(), ptr::null_mut(), ptr::null_mut()]; let mut errs: [*mut c_char; 3] = [ptr::null_mut(), ptr::null_mut(), ptr::null_mut()];
rocksdb_multi_get(db, rocksdb_multi_get(
roptions, db,
3, roptions,
keys.as_ptr(), 3,
keys_sizes.as_ptr(), keys.as_ptr(),
vals.as_mut_ptr(), keys_sizes.as_ptr(),
vals_sizes.as_mut_ptr(), vals.as_mut_ptr(),
errs.as_mut_ptr()); vals_sizes.as_mut_ptr(),
errs.as_mut_ptr(),
);
for i in 0..3 { for i in 0..3 {
CheckEqual(ptr::null(), errs[i], 0); CheckEqual(ptr::null(), errs[i], 0);
@ -667,22 +727,26 @@ fn ffi() {
let key = keybuf.to_bytes_with_nul(); let key = keybuf.to_bytes_with_nul();
let valbuf = CString::new(format!("v{:020}", i)).unwrap(); let valbuf = CString::new(format!("v{:020}", i)).unwrap();
let val = valbuf.to_bytes_with_nul(); let val = valbuf.to_bytes_with_nul();
rocksdb_put(db, rocksdb_put(
woptions, db,
key.as_ptr() as *const c_char, woptions,
key.len() as size_t, key.as_ptr() as *const c_char,
val.as_ptr() as *const c_char, key.len() as size_t,
val.len() as size_t, val.as_ptr() as *const c_char,
&mut err); val.len() as size_t,
&mut err,
);
CheckNoError!(err); CheckNoError!(err);
} }
rocksdb_approximate_sizes(db, rocksdb_approximate_sizes(
2, db,
start.as_ptr(), 2,
start_len.as_ptr(), start.as_ptr(),
limit.as_ptr(), start_len.as_ptr(),
limit_len.as_ptr(), limit.as_ptr(),
sizes.as_mut_ptr()); limit_len.as_ptr(),
sizes.as_mut_ptr(),
);
CheckCondition!(sizes[0] > 0); CheckCondition!(sizes[0] > 0);
CheckCondition!(sizes[1] > 0); CheckCondition!(sizes[1] > 0);
} }
@ -733,12 +797,14 @@ fn ffi() {
// First run uses custom filter, second run uses bloom filter // First run uses custom filter, second run uses bloom filter
CheckNoError!(err); CheckNoError!(err);
let mut policy: *mut rocksdb_filterpolicy_t = if run == 0 { let mut policy: *mut rocksdb_filterpolicy_t = if run == 0 {
rocksdb_filterpolicy_create(ptr::null_mut(), rocksdb_filterpolicy_create(
Some(FilterDestroy), ptr::null_mut(),
Some(FilterCreate), Some(FilterDestroy),
Some(FilterKeyMatch), Some(FilterCreate),
None, Some(FilterKeyMatch),
Some(FilterName)) None,
Some(FilterName),
)
} else { } else {
rocksdb_filterpolicy_create_bloom(10) rocksdb_filterpolicy_create_bloom(10)
}; };
@ -751,21 +817,25 @@ fn ffi() {
rocksdb_options_set_block_based_table_factory(options, table_options); rocksdb_options_set_block_based_table_factory(options, table_options);
db = rocksdb_open(options, dbname, &mut err); db = rocksdb_open(options, dbname, &mut err);
CheckNoError!(err); CheckNoError!(err);
rocksdb_put(db, rocksdb_put(
woptions, db,
cstrp!("foo"), woptions,
3, cstrp!("foo"),
cstrp!("foovalue"), 3,
8, cstrp!("foovalue"),
&mut err); 8,
&mut err,
);
CheckNoError!(err); CheckNoError!(err);
rocksdb_put(db, rocksdb_put(
woptions, db,
cstrp!("bar"), woptions,
3, cstrp!("bar"),
cstrp!("barvalue"), 3,
8, cstrp!("barvalue"),
&mut err); 8,
&mut err,
);
CheckNoError!(err); CheckNoError!(err);
rocksdb_compact_range(db, ptr::null(), 0, ptr::null(), 0); rocksdb_compact_range(db, ptr::null(), 0, ptr::null(), 0);
@ -791,10 +861,12 @@ fn ffi() {
{ {
let options_with_filter = rocksdb_options_create(); let options_with_filter = rocksdb_options_create();
rocksdb_options_set_create_if_missing(options_with_filter, 1); rocksdb_options_set_create_if_missing(options_with_filter, 1);
let cfilter = rocksdb_compactionfilter_create(ptr::null_mut(), let cfilter = rocksdb_compactionfilter_create(
Some(CFilterDestroy), ptr::null_mut(),
Some(CFilterFilter), Some(CFilterDestroy),
Some(CFilterName)); Some(CFilterFilter),
Some(CFilterName),
);
// Create new database // Create new database
rocksdb_close(db); rocksdb_close(db);
rocksdb_destroy_db(options_with_filter, dbname, &mut err); rocksdb_destroy_db(options_with_filter, dbname, &mut err);
@ -810,62 +882,74 @@ fn ffi() {
{ {
let mut options_with_filter_factory = rocksdb_options_create(); let mut options_with_filter_factory = rocksdb_options_create();
rocksdb_options_set_create_if_missing(options_with_filter_factory, 1); rocksdb_options_set_create_if_missing(options_with_filter_factory, 1);
let mut factory = rocksdb_compactionfilterfactory_create(ptr::null_mut(), let mut factory = rocksdb_compactionfilterfactory_create(
Some(CFilterFactoryDestroy), ptr::null_mut(),
Some(CFilterCreate), Some(CFilterFactoryDestroy),
Some(CFilterFactoryName)); Some(CFilterCreate),
Some(CFilterFactoryName),
);
// Create new database // Create new database
rocksdb_close(db); rocksdb_close(db);
rocksdb_destroy_db(options_with_filter_factory, dbname, &mut err); rocksdb_destroy_db(options_with_filter_factory, dbname, &mut err);
rocksdb_options_set_compaction_filter_factory(options_with_filter_factory, factory); rocksdb_options_set_compaction_filter_factory(options_with_filter_factory, factory);
db = CheckCompaction(dbname, db, options_with_filter_factory, roptions, woptions); db = CheckCompaction(dbname, db, options_with_filter_factory, roptions, woptions);
rocksdb_options_set_compaction_filter_factory(options_with_filter_factory, rocksdb_options_set_compaction_filter_factory(
ptr::null_mut()); options_with_filter_factory,
ptr::null_mut(),
);
rocksdb_options_destroy(options_with_filter_factory); rocksdb_options_destroy(options_with_filter_factory);
} }
StartPhase("merge_operator"); StartPhase("merge_operator");
{ {
let mut merge_operator = rocksdb_mergeoperator_create(ptr::null_mut(), let mut merge_operator = rocksdb_mergeoperator_create(
Some(MergeOperatorDestroy), ptr::null_mut(),
Some(MergeOperatorFullMerge), Some(MergeOperatorDestroy),
Some(MergeOperatorPartialMerge), Some(MergeOperatorFullMerge),
None, Some(MergeOperatorPartialMerge),
Some(MergeOperatorName)); None,
Some(MergeOperatorName),
);
// Create new database // Create new database
rocksdb_close(db); rocksdb_close(db);
rocksdb_destroy_db(options, dbname, &mut err); rocksdb_destroy_db(options, dbname, &mut err);
rocksdb_options_set_merge_operator(options, merge_operator); rocksdb_options_set_merge_operator(options, merge_operator);
db = rocksdb_open(options, dbname, &mut err); db = rocksdb_open(options, dbname, &mut err);
CheckNoError!(err); CheckNoError!(err);
rocksdb_put(db, rocksdb_put(
woptions, db,
cstrp!("foo"), woptions,
3, cstrp!("foo"),
cstrp!("foovalue"), 3,
8, cstrp!("foovalue"),
&mut err); 8,
&mut err,
);
CheckNoError!(err); CheckNoError!(err);
CheckGet(db, roptions, cstrp!("foo"), cstrp!("foovalue")); CheckGet(db, roptions, cstrp!("foo"), cstrp!("foovalue"));
rocksdb_merge(db, rocksdb_merge(
woptions, db,
cstrp!("foo"), woptions,
3, cstrp!("foo"),
cstrp!("barvalue"), 3,
8, cstrp!("barvalue"),
&mut err); 8,
&mut err,
);
CheckNoError!(err); CheckNoError!(err);
CheckGet(db, roptions, cstrp!("foo"), cstrp!("fake")); CheckGet(db, roptions, cstrp!("foo"), cstrp!("fake"));
// Merge of a non-existing value // Merge of a non-existing value
rocksdb_merge(db, rocksdb_merge(
woptions, db,
cstrp!("bar"), woptions,
3, cstrp!("bar"),
cstrp!("barvalue"), 3,
8, cstrp!("barvalue"),
&mut err); 8,
&mut err,
);
CheckNoError!(err); CheckNoError!(err);
CheckGet(db, roptions, cstrp!("bar"), cstrp!("fake")); CheckGet(db, roptions, cstrp!("bar"), cstrp!("fake"));
} }
@ -898,25 +982,29 @@ fn ffi() {
let mut cf_names: [*const c_char; 2] = [cstrp!("default"), cstrp!("cf1")]; let mut cf_names: [*const c_char; 2] = [cstrp!("default"), cstrp!("cf1")];
let mut cf_opts: [*const rocksdb_options_t; 2] = [cf_options, cf_options]; let mut cf_opts: [*const rocksdb_options_t; 2] = [cf_options, cf_options];
let mut handles: [*mut rocksdb_column_family_handle_t; 2] = [ptr::null_mut(), let mut handles: [*mut rocksdb_column_family_handle_t; 2] =
ptr::null_mut()]; [ptr::null_mut(), ptr::null_mut()];
db = rocksdb_open_column_families(db_options, db = rocksdb_open_column_families(
dbname, db_options,
2, dbname,
cf_names.as_mut_ptr(), 2,
cf_opts.as_mut_ptr(), cf_names.as_mut_ptr(),
handles.as_mut_ptr(), cf_opts.as_mut_ptr(),
&mut err); handles.as_mut_ptr(),
&mut err,
);
CheckNoError!(err); CheckNoError!(err);
rocksdb_put_cf(db, rocksdb_put_cf(
woptions, db,
handles[1], woptions,
cstrp!("foo"), handles[1],
3, cstrp!("foo"),
cstrp!("hello"), 3,
5, cstrp!("hello"),
&mut err); 5,
&mut err,
);
CheckNoError!(err); CheckNoError!(err);
CheckGetCF(db, roptions, handles[1], cstrp!("foo"), cstrp!("hello")); CheckGetCF(db, roptions, handles[1], cstrp!("foo"), cstrp!("hello"));
@ -940,21 +1028,23 @@ fn ffi() {
rocksdb_writebatch_destroy(wb); rocksdb_writebatch_destroy(wb);
let keys: [*const c_char; 3] = [cstrp!("box"), cstrp!("box"), cstrp!("barfooxx")]; let keys: [*const c_char; 3] = [cstrp!("box"), cstrp!("box"), cstrp!("barfooxx")];
let get_handles: [*const rocksdb_column_family_handle_t; 3] = [handles[0], handles[1], let get_handles: [*const rocksdb_column_family_handle_t; 3] =
handles[1]]; [handles[0], handles[1], handles[1]];
let keys_sizes: [size_t; 3] = [3, 3, 8]; let keys_sizes: [size_t; 3] = [3, 3, 8];
let mut vals: [*mut c_char; 3] = [ptr::null_mut(), ptr::null_mut(), ptr::null_mut()]; let mut vals: [*mut c_char; 3] = [ptr::null_mut(), ptr::null_mut(), ptr::null_mut()];
let mut vals_sizes: [size_t; 3] = [0, 0, 0]; let mut vals_sizes: [size_t; 3] = [0, 0, 0];
let mut errs: [*mut c_char; 3] = [ptr::null_mut(), ptr::null_mut(), ptr::null_mut()]; let mut errs: [*mut c_char; 3] = [ptr::null_mut(), ptr::null_mut(), ptr::null_mut()];
rocksdb_multi_get_cf(db, rocksdb_multi_get_cf(
roptions, db,
get_handles.as_ptr(), roptions,
3, get_handles.as_ptr(),
keys.as_ptr(), 3,
keys_sizes.as_ptr(), keys.as_ptr(),
vals.as_mut_ptr(), keys_sizes.as_ptr(),
vals_sizes.as_mut_ptr(), vals.as_mut_ptr(),
errs.as_mut_ptr()); vals_sizes.as_mut_ptr(),
errs.as_mut_ptr(),
);
for i in 0..3 { for i in 0..3 {
CheckEqual(ptr::null(), errs[i], 0); CheckEqual(ptr::null(), errs[i], 0);
@ -982,16 +1072,18 @@ fn ffi() {
CheckNoError!(err); CheckNoError!(err);
rocksdb_iter_destroy(iter); rocksdb_iter_destroy(iter);
let mut iters_cf_handles: [*mut rocksdb_column_family_handle_t; 2] = [handles[0], let mut iters_cf_handles: [*mut rocksdb_column_family_handle_t; 2] =
handles[1]]; [handles[0], handles[1]];
let mut iters_handles: [*mut rocksdb_iterator_t; 2] = [ptr::null_mut(), let mut iters_handles: [*mut rocksdb_iterator_t; 2] =
ptr::null_mut()]; [ptr::null_mut(), ptr::null_mut()];
rocksdb_create_iterators(db, rocksdb_create_iterators(
roptions, db,
iters_cf_handles.as_mut_ptr(), roptions,
iters_handles.as_mut_ptr(), iters_cf_handles.as_mut_ptr(),
2, iters_handles.as_mut_ptr(),
&mut err); 2,
&mut err,
);
CheckNoError!(err); CheckNoError!(err);
iter = iters_handles[0]; iter = iters_handles[0];
@ -1030,8 +1122,10 @@ fn ffi() {
{ {
// Create new database // Create new database
rocksdb_options_set_allow_mmap_reads(options, 1); rocksdb_options_set_allow_mmap_reads(options, 1);
rocksdb_options_set_prefix_extractor(options, rocksdb_options_set_prefix_extractor(
rocksdb_slicetransform_create_fixed_prefix(3)); options,
rocksdb_slicetransform_create_fixed_prefix(3),
);
rocksdb_options_set_hash_skip_list_rep(options, 5000, 4, 4); rocksdb_options_set_hash_skip_list_rep(options, 5000, 4, 4);
rocksdb_options_set_plain_table_factory(options, 4, 10, 0.75, 16); rocksdb_options_set_plain_table_factory(options, 4, 10, 0.75, 16);
rocksdb_options_set_allow_concurrent_memtable_write(options, 0); rocksdb_options_set_allow_concurrent_memtable_write(options, 0);

@@ -1,4 +0,0 @@
-reorder_imports = true
-max_width = 100
-ideal_width = 100
-trailing_comma = always
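The hunk above is the "fixing rustfmt.toml" half of this commit: all four keys are removed, so rustfmt falls back to its defaults. For orientation only (an illustration, not part of this PR), a replacement config that still pins the line width explicitly might look like the sketch below; the unquoted `always` in the old file is not valid TOML, and `ideal_width` is an option that newer rustfmt releases no longer recognize, which is presumably why the file had to change.

# Hypothetical minimal rustfmt.toml, not taken from this commit.
# Import reordering is already rustfmt's default, so only the line width is pinned.
max_width = 100

Dropping the keys entirely, as the commit does, has the same effect with nothing left to maintain.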

@@ -13,9 +13,8 @@
// limitations under the License.
//

-use {DB, Error};
use ffi;
+use {Error, DB};
use libc::{c_int, uint32_t};
use std::ffi::CString;
@@ -45,7 +44,7 @@ impl BackupEngine {
            Err(_) => {
                return Err(Error::new(
                    "Failed to convert path to CString \
                     when opening backup engine"
                        .to_owned(),
                ))
            }
@@ -64,8 +63,7 @@ impl BackupEngine {
    pub fn create_new_backup(&mut self, db: &DB) -> Result<(), Error> {
        unsafe {
            ffi_try!(ffi::rocksdb_backup_engine_create_new_backup(
-                self.inner,
-                db.inner,
+                self.inner, db.inner,
            ));
            Ok(())
        }

@@ -13,14 +13,13 @@
// limitations under the License.
//

-///! Implementation of bindings to RocksDB Checkpoint[1] API
-///
-/// [1]: https://github.com/facebook/rocksdb/wiki/Checkpoints
-use {DB, Error};
use ffi;
use std::ffi::CString;
use std::path::Path;
+///! Implementation of bindings to RocksDB Checkpoint[1] API
+///
+/// [1]: https://github.com/facebook/rocksdb/wiki/Checkpoints
+use {Error, DB};

/// Undocumented parameter for `ffi::rocksdb_checkpoint_create` function. Zero by default.
const LOG_SIZE_FOR_FLUSH: u64 = 0_u64;
@@ -45,9 +44,7 @@ impl Checkpoint {
            return Err(Error::new("Could not create checkpoint object.".to_owned()));
        }

-        Ok(Checkpoint {
-            inner: checkpoint,
-        })
+        Ok(Checkpoint { inner: checkpoint })
    }

    /// Creates new physical DB checkpoint in directory specified by `path`.
@@ -57,14 +54,17 @@ impl Checkpoint {
            Ok(c) => c,
            Err(_) => {
                return Err(Error::new(
-                    "Failed to convert path to CString when creating DB checkpoint"
-                        .to_owned(),
+                    "Failed to convert path to CString when creating DB checkpoint".to_owned(),
                ));
            }
        };

        unsafe {
-            ffi_try!(ffi::rocksdb_checkpoint_create(self.inner, cpath.as_ptr(), LOG_SIZE_FOR_FLUSH,));
+            ffi_try!(ffi::rocksdb_checkpoint_create(
+                self.inner,
+                cpath.as_ptr(),
+                LOG_SIZE_FOR_FLUSH,
+            ));
            Ok(())
        }

@@ -32,7 +32,6 @@ pub enum Decision {
    Change(&'static [u8]),
}

/// Function to filter compaction with.
///
/// This function takes the level of compaction, the key, and the existing value
@@ -46,8 +45,7 @@ impl<F> CompactionFilterFn for F
where
    F: FnMut(u32, &[u8], &[u8]) -> Decision,
    F: Send + 'static,
-{
-}
+{}

pub struct CompactionFilterCallback<F>
where
@@ -117,7 +115,7 @@ fn test_filter(level: u32, key: &[u8], value: &[u8]) -> Decision {
#[test]
fn compaction_filter_test() {
-    use {DB, Options};
+    use {Options, DB};

    let path = "_rust_rocksdb_filtertest";
    let mut opts = Options::default();
@@ -133,5 +131,4 @@ fn compaction_filter_test() {
        assert!(db.get(b"_k").unwrap().is_none());
        assert_eq!(&*db.get(b"%k").unwrap().unwrap(), b"secret");
    }
}

@@ -13,7 +13,6 @@
// limitations under the License.
//

use libc::{c_char, c_int, c_void, size_t};
use std::cmp::Ordering;
use std::ffi::CString;

@ -13,13 +13,13 @@
// limitations under the License. // limitations under the License.
// //
use {DB, Error, Options, WriteOptions, ColumnFamily, ColumnFamilyDescriptor};
use ffi; use ffi;
use ffi_util::opt_bytes_to_ptr; use ffi_util::opt_bytes_to_ptr;
use {ColumnFamily, ColumnFamilyDescriptor, Error, Options, WriteOptions, DB};
use libc::{self, c_char, c_int, c_uchar, c_void, size_t}; use libc::{self, c_char, c_int, c_uchar, c_void, size_t};
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::ffi::CStr;
use std::ffi::CString; use std::ffi::CString;
use std::fmt; use std::fmt;
use std::fs; use std::fs;
@ -28,7 +28,6 @@ use std::path::Path;
use std::ptr; use std::ptr;
use std::slice; use std::slice;
use std::str; use std::str;
use std::ffi::CStr;
pub fn new_bloom_filter(bits: c_int) -> *mut ffi::rocksdb_filterpolicy_t { pub fn new_bloom_filter(bits: c_int) -> *mut ffi::rocksdb_filterpolicy_t {
unsafe { ffi::rocksdb_filterpolicy_create_bloom(bits) } unsafe { ffi::rocksdb_filterpolicy_create_bloom(bits) }
@ -150,7 +149,6 @@ pub struct DBRawIterator {
inner: *mut ffi::rocksdb_iterator_t, inner: *mut ffi::rocksdb_iterator_t,
} }
/// An iterator over a database or column family, with specifiable /// An iterator over a database or column family, with specifiable
/// ranges and direction. /// ranges and direction.
/// ///
@ -201,7 +199,11 @@ pub enum IteratorMode<'a> {
impl DBRawIterator { impl DBRawIterator {
fn new(db: &DB, readopts: &ReadOptions) -> DBRawIterator { fn new(db: &DB, readopts: &ReadOptions) -> DBRawIterator {
unsafe { DBRawIterator { inner: ffi::rocksdb_create_iterator(db.inner, readopts.inner) } } unsafe {
DBRawIterator {
inner: ffi::rocksdb_create_iterator(db.inner, readopts.inner),
}
}
} }
fn new_cf( fn new_cf(
@ -581,10 +583,13 @@ impl<'a> Drop for Snapshot<'a> {
impl ColumnFamilyDescriptor { impl ColumnFamilyDescriptor {
// Create a new column family descriptor with the specified name and options. // Create a new column family descriptor with the specified name and options.
pub fn new<S>(name: S, options: Options) -> Self where S: Into<String> { pub fn new<S>(name: S, options: Options) -> Self
where
S: Into<String>,
{
ColumnFamilyDescriptor { ColumnFamilyDescriptor {
name: name.into(), name: name.into(),
options options,
} }
} }
} }
@ -606,20 +611,28 @@ impl DB {
/// ///
/// Column families opened using this function will be created with default `Options`. /// Column families opened using this function will be created with default `Options`.
pub fn open_cf<P: AsRef<Path>>(opts: &Options, path: P, cfs: &[&str]) -> Result<DB, Error> { pub fn open_cf<P: AsRef<Path>>(opts: &Options, path: P, cfs: &[&str]) -> Result<DB, Error> {
let cfs_v = cfs.to_vec().iter().map(|name| ColumnFamilyDescriptor::new(*name, Options::default())).collect(); let cfs_v = cfs
.to_vec()
.iter()
.map(|name| ColumnFamilyDescriptor::new(*name, Options::default()))
.collect();
DB::open_cf_descriptors(opts, path, cfs_v) DB::open_cf_descriptors(opts, path, cfs_v)
} }
/// Open a database with the given database options and column family names/options. /// Open a database with the given database options and column family names/options.
pub fn open_cf_descriptors<P: AsRef<Path>>(opts: &Options, path: P, cfs: Vec<ColumnFamilyDescriptor>) -> Result<DB, Error> { pub fn open_cf_descriptors<P: AsRef<Path>>(
opts: &Options,
path: P,
cfs: Vec<ColumnFamilyDescriptor>,
) -> Result<DB, Error> {
let path = path.as_ref(); let path = path.as_ref();
let cpath = match CString::new(path.to_string_lossy().as_bytes()) { let cpath = match CString::new(path.to_string_lossy().as_bytes()) {
Ok(c) => c, Ok(c) => c,
Err(_) => { Err(_) => {
return Err(Error::new( return Err(Error::new(
"Failed to convert path to CString \ "Failed to convert path to CString \
when opening DB." when opening DB."
.to_owned(), .to_owned(),
)) ))
} }
@ -628,7 +641,7 @@ impl DB {
if let Err(e) = fs::create_dir_all(&path) { if let Err(e) = fs::create_dir_all(&path) {
return Err(Error::new(format!( return Err(Error::new(format!(
"Failed to create RocksDB\ "Failed to create RocksDB\
directory: `{:?}`.", directory: `{:?}`.",
e e
))); )));
} }
@ -646,7 +659,7 @@ impl DB {
if !cfs_v.iter().any(|cf| cf.name == "default") { if !cfs_v.iter().any(|cf| cf.name == "default") {
cfs_v.push(ColumnFamilyDescriptor { cfs_v.push(ColumnFamilyDescriptor {
name: String::from("default"), name: String::from("default"),
options: Options::default() options: Options::default(),
}); });
} }
// We need to store our CStrings in an intermediate vector // We need to store our CStrings in an intermediate vector
@ -661,7 +674,8 @@ impl DB {
// These handles will be populated by DB. // These handles will be populated by DB.
let mut cfhandles: Vec<_> = cfs_v.iter().map(|_| ptr::null_mut()).collect(); let mut cfhandles: Vec<_> = cfs_v.iter().map(|_| ptr::null_mut()).collect();
let mut cfopts: Vec<_> = cfs_v.iter() let mut cfopts: Vec<_> = cfs_v
.iter()
.map(|cf| cf.options.inner as *const _) .map(|cf| cf.options.inner as *const _)
.collect(); .collect();
@ -672,14 +686,15 @@ impl DB {
cfs_v.len() as c_int, cfs_v.len() as c_int,
cfnames.as_mut_ptr(), cfnames.as_mut_ptr(),
cfopts.as_mut_ptr(), cfopts.as_mut_ptr(),
cfhandles.as_mut_ptr(),)); cfhandles.as_mut_ptr(),
));
} }
for handle in &cfhandles { for handle in &cfhandles {
if handle.is_null() { if handle.is_null() {
return Err(Error::new( return Err(Error::new(
"Received null column family \ "Received null column family \
handle from DB." handle from DB."
.to_owned(), .to_owned(),
)); ));
} }
@ -707,7 +722,7 @@ impl DB {
Err(_) => { Err(_) => {
return Err(Error::new( return Err(Error::new(
"Failed to convert path to CString \ "Failed to convert path to CString \
when opening DB." when opening DB."
.to_owned(), .to_owned(),
)) ))
} }
@ -731,7 +746,6 @@ impl DB {
} }
} }
pub fn destroy<P: AsRef<Path>>(opts: &Options, path: P) -> Result<(), Error> { pub fn destroy<P: AsRef<Path>>(opts: &Options, path: P) -> Result<(), Error> {
let cpath = CString::new(path.as_ref().to_string_lossy().as_bytes()).unwrap(); let cpath = CString::new(path.as_ref().to_string_lossy().as_bytes()).unwrap();
unsafe { unsafe {
@ -773,10 +787,10 @@ impl DB {
if readopts.inner.is_null() { if readopts.inner.is_null() {
return Err(Error::new( return Err(Error::new(
"Unable to create RocksDB read options. \ "Unable to create RocksDB read options. \
This is a fairly trivial call, and its \ This is a fairly trivial call, and its \
failure may be indicative of a \ failure may be indicative of a \
mis-compiled or mis-loaded RocksDB \ mis-compiled or mis-loaded RocksDB \
library." library."
.to_owned(), .to_owned(),
)); ));
} }
@ -812,10 +826,10 @@ impl DB {
if readopts.inner.is_null() { if readopts.inner.is_null() {
return Err(Error::new( return Err(Error::new(
"Unable to create RocksDB read options. \ "Unable to create RocksDB read options. \
This is a fairly trivial call, and its \ This is a fairly trivial call, and its \
failure may be indicative of a \ failure may be indicative of a \
mis-compiled or mis-loaded RocksDB \ mis-compiled or mis-loaded RocksDB \
library." library."
.to_owned(), .to_owned(),
)); ));
} }
@ -848,7 +862,7 @@ impl DB {
Err(_) => { Err(_) => {
return Err(Error::new( return Err(Error::new(
"Failed to convert path to CString \ "Failed to convert path to CString \
when opening rocksdb" when opening rocksdb"
.to_owned(), .to_owned(),
)) ))
} }
@ -929,11 +943,16 @@ impl DB {
pub fn prefix_iterator_cf<'a>( pub fn prefix_iterator_cf<'a>(
&self, &self,
cf_handle: ColumnFamily, cf_handle: ColumnFamily,
prefix: &'a [u8] prefix: &'a [u8],
) -> Result<DBIterator, Error> { ) -> Result<DBIterator, Error> {
let mut opts = ReadOptions::default(); let mut opts = ReadOptions::default();
opts.set_prefix_same_as_start(true); opts.set_prefix_same_as_start(true);
DBIterator::new_cf(self, cf_handle, &opts, IteratorMode::From(prefix, Direction::Forward)) DBIterator::new_cf(
self,
cf_handle,
&opts,
IteratorMode::From(prefix, Direction::Forward),
)
} }
pub fn raw_iterator(&self) -> DBRawIterator { pub fn raw_iterator(&self) -> DBRawIterator {
@ -1207,7 +1226,9 @@ impl WriteBatch {
impl Default for WriteBatch { impl Default for WriteBatch {
fn default() -> WriteBatch { fn default() -> WriteBatch {
WriteBatch { inner: unsafe { ffi::rocksdb_writebatch_create() } } WriteBatch {
inner: unsafe { ffi::rocksdb_writebatch_create() },
}
} }
} }
@ -1268,21 +1289,21 @@ impl ReadOptions {
} }
pub fn set_prefix_same_as_start(&mut self, v: bool) { pub fn set_prefix_same_as_start(&mut self, v: bool) {
unsafe { unsafe { ffi::rocksdb_readoptions_set_prefix_same_as_start(self.inner, v as c_uchar) }
ffi::rocksdb_readoptions_set_prefix_same_as_start(self.inner, v as c_uchar)
}
} }
pub fn set_total_order_seek(&mut self, v:bool) { pub fn set_total_order_seek(&mut self, v: bool) {
unsafe { unsafe { ffi::rocksdb_readoptions_set_total_order_seek(self.inner, v as c_uchar) }
ffi::rocksdb_readoptions_set_total_order_seek(self.inner, v as c_uchar)
}
} }
} }
impl Default for ReadOptions { impl Default for ReadOptions {
fn default() -> ReadOptions { fn default() -> ReadOptions {
unsafe { ReadOptions { inner: ffi::rocksdb_readoptions_create() } } unsafe {
ReadOptions {
inner: ffi::rocksdb_readoptions_create(),
}
}
} }
} }
@ -1352,7 +1373,6 @@ fn test_db_vector() {
assert_eq!(&*v, &ctrl[..]); assert_eq!(&*v, &ctrl[..]);
} }
#[test] #[test]
fn external() { fn external() {
let path = "_rust_rocksdb_externaltest"; let path = "_rust_rocksdb_externaltest";

@@ -18,14 +18,17 @@ use std::path::Path;
use libc::{self, c_int, c_uchar, c_uint, c_void, size_t, uint64_t};

-use ffi;
-use {BlockBasedOptions, BlockBasedIndexType, DBCompactionStyle, DBCompressionType, DBRecoveryMode, MemtableFactory,
-     Options, WriteOptions};
-use compaction_filter::{self, CompactionFilterCallback, CompactionFilterFn, filter_callback};
+use compaction_filter::{self, filter_callback, CompactionFilterCallback, CompactionFilterFn};
use comparator::{self, ComparatorCallback, CompareFn};
-use merge_operator::{self, MergeFn, MergeOperatorCallback, full_merge_callback,
-                     partial_merge_callback};
+use ffi;
+use merge_operator::{
+    self, full_merge_callback, partial_merge_callback, MergeFn, MergeOperatorCallback,
+};
use slice_transform::SliceTransform;
+use {
+    BlockBasedIndexType, BlockBasedOptions, DBCompactionStyle, DBCompressionType, DBRecoveryMode,
+    MemtableFactory, Options, WriteOptions,
+};

pub fn new_cache(capacity: size_t) -> *mut ffi::rocksdb_cache_t {
    unsafe { ffi::rocksdb_cache_create_lru(capacity) }
@ -189,7 +192,10 @@ impl Options {
/// ``` /// ```
pub fn create_missing_column_families(&mut self, create_missing_cfs: bool) { pub fn create_missing_column_families(&mut self, create_missing_cfs: bool) {
unsafe { unsafe {
ffi::rocksdb_options_set_create_missing_column_families(self.inner, create_missing_cfs as c_uchar); ffi::rocksdb_options_set_create_missing_column_families(
self.inner,
create_missing_cfs as c_uchar,
);
} }
} }
@ -256,14 +262,19 @@ impl Options {
/// Default: `0` /// Default: `0`
pub fn set_compaction_readahead_size(&mut self, compaction_readahead_size: usize) { pub fn set_compaction_readahead_size(&mut self, compaction_readahead_size: usize) {
unsafe { unsafe {
ffi::rocksdb_options_compaction_readahead_size(self.inner, compaction_readahead_size as usize); ffi::rocksdb_options_compaction_readahead_size(
self.inner,
compaction_readahead_size as usize,
);
} }
} }
pub fn set_merge_operator(&mut self, name: &str, pub fn set_merge_operator(
full_merge_fn: MergeFn, &mut self,
partial_merge_fn: Option<MergeFn>) { name: &str,
full_merge_fn: MergeFn,
partial_merge_fn: Option<MergeFn>,
) {
let cb = Box::new(MergeOperatorCallback { let cb = Box::new(MergeOperatorCallback {
name: CString::new(name.as_bytes()).unwrap(), name: CString::new(name.as_bytes()).unwrap(),
full_merge_fn: full_merge_fn, full_merge_fn: full_merge_fn,
@ -283,8 +294,10 @@ impl Options {
} }
} }
#[deprecated(since = "0.5.0", #[deprecated(
note = "add_merge_operator has been renamed to set_merge_operator")] since = "0.5.0",
note = "add_merge_operator has been renamed to set_merge_operator"
)]
pub fn add_merge_operator(&mut self, name: &str, merge_fn: MergeFn) { pub fn add_merge_operator(&mut self, name: &str, merge_fn: MergeFn) {
self.set_merge_operator(name, merge_fn, None); self.set_merge_operator(name, merge_fn, None);
} }
@ -343,14 +356,13 @@ impl Options {
} }
pub fn set_prefix_extractor(&mut self, prefix_extractor: SliceTransform) { pub fn set_prefix_extractor(&mut self, prefix_extractor: SliceTransform) {
unsafe { unsafe { ffi::rocksdb_options_set_prefix_extractor(self.inner, prefix_extractor.inner) }
ffi::rocksdb_options_set_prefix_extractor(
self.inner, prefix_extractor.inner
)
}
} }
#[deprecated(since = "0.5.0", note = "add_comparator has been renamed to set_comparator")] #[deprecated(
since = "0.5.0",
note = "add_comparator has been renamed to set_comparator"
)]
pub fn add_comparator(&mut self, name: &str, compare_fn: CompareFn) { pub fn add_comparator(&mut self, name: &str, compare_fn: CompareFn) {
self.set_comparator(name, compare_fn); self.set_comparator(name, compare_fn);
} }
@ -532,8 +544,10 @@ impl Options {
/// let mut opts = Options::default(); /// let mut opts = Options::default();
/// opts.set_allow_os_buffer(false); /// opts.set_allow_os_buffer(false);
/// ``` /// ```
#[deprecated(since = "0.7.0", #[deprecated(
note = "replaced with set_use_direct_reads/set_use_direct_io_for_flush_and_compaction methods")] since = "0.7.0",
note = "replaced with set_use_direct_reads/set_use_direct_io_for_flush_and_compaction methods"
)]
pub fn set_allow_os_buffer(&mut self, is_allow: bool) { pub fn set_allow_os_buffer(&mut self, is_allow: bool) {
self.set_use_direct_reads(!is_allow); self.set_use_direct_reads(!is_allow);
self.set_use_direct_io_for_flush_and_compaction(!is_allow); self.set_use_direct_io_for_flush_and_compaction(!is_allow);
@ -844,7 +858,6 @@ impl Options {
} }
} }
/// Sets the maximum number of concurrent background compaction jobs, submitted to /// Sets the maximum number of concurrent background compaction jobs, submitted to
/// the default LOW priority thread pool. /// the default LOW priority thread pool.
/// We first try to schedule compactions based on /// We first try to schedule compactions based on
@ -1049,9 +1062,7 @@ impl Options {
/// ///
/// Default: `true` /// Default: `true`
pub fn set_advise_random_on_open(&mut self, advise: bool) { pub fn set_advise_random_on_open(&mut self, advise: bool) {
unsafe { unsafe { ffi::rocksdb_options_set_advise_random_on_open(self.inner, advise as c_uchar) }
ffi::rocksdb_options_set_advise_random_on_open(self.inner, advise as c_uchar)
}
} }
/// Sets the number of levels for this database. /// Sets the number of levels for this database.

@@ -54,17 +54,18 @@ mod ffi_util;
pub mod backup;
pub mod checkpoint;
-mod comparator;
-pub mod merge_operator;
pub mod compaction_filter;
+mod comparator;
mod db;
mod db_options;
+pub mod merge_operator;
mod slice_transform;

pub use compaction_filter::Decision as CompactionDecision;
-pub use db::{DBCompactionStyle, DBCompressionType, DBIterator, DBRawIterator, DBRecoveryMode,
-             DBVector, ReadOptions, Direction, IteratorMode, Snapshot, WriteBatch,
-             new_bloom_filter};
+pub use db::{
+    new_bloom_filter, DBCompactionStyle, DBCompressionType, DBIterator, DBRawIterator,
+    DBRecoveryMode, DBVector, Direction, IteratorMode, ReadOptions, Snapshot, WriteBatch,
+};
pub use slice_transform::SliceTransform;
@@ -155,8 +156,14 @@ pub enum BlockBasedIndexType {
/// See https://github.com/facebook/rocksdb/wiki/MemTable for more information.
pub enum MemtableFactory {
    Vector,
-    HashSkipList { bucket_count: usize, height: i32, branching_factor: i32 },
-    HashLinkList { bucket_count: usize }
+    HashSkipList {
+        bucket_count: usize,
+        height: i32,
+        branching_factor: i32,
+    },
+    HashLinkList {
+        bucket_count: usize,
+    },
}
@@ -222,7 +229,6 @@ pub struct WriteOptions {
    inner: *mut ffi::rocksdb_writeoptions_t,
}

/// An opaque type used to represent a column family. Returned from some functions, and used
/// in others
#[derive(Copy, Clone)]

@ -53,7 +53,6 @@
//! } //! }
//! ``` //! ```
use libc::{self, c_char, c_int, c_void, size_t}; use libc::{self, c_char, c_int, c_void, size_t};
use std::ffi::CString; use std::ffi::CString;
use std::mem; use std::mem;
@ -63,394 +62,391 @@ use std::slice;
pub type MergeFn = fn(&[u8], Option<&[u8]>, &mut MergeOperands) -> Option<Vec<u8>>; pub type MergeFn = fn(&[u8], Option<&[u8]>, &mut MergeOperands) -> Option<Vec<u8>>;
pub struct MergeOperatorCallback { pub struct MergeOperatorCallback {
pub name: CString, pub name: CString,
pub full_merge_fn: MergeFn, pub full_merge_fn: MergeFn,
pub partial_merge_fn: MergeFn, pub partial_merge_fn: MergeFn,
} }
pub unsafe extern "C" fn destructor_callback(raw_cb: *mut c_void) { pub unsafe extern "C" fn destructor_callback(raw_cb: *mut c_void) {
let _: Box<MergeOperatorCallback> = mem::transmute(raw_cb); let _: Box<MergeOperatorCallback> = mem::transmute(raw_cb);
} }
pub unsafe extern "C" fn name_callback(raw_cb: *mut c_void) -> *const c_char { pub unsafe extern "C" fn name_callback(raw_cb: *mut c_void) -> *const c_char {
let cb = &mut *(raw_cb as *mut MergeOperatorCallback); let cb = &mut *(raw_cb as *mut MergeOperatorCallback);
cb.name.as_ptr() cb.name.as_ptr()
} }
pub unsafe extern "C" fn full_merge_callback( pub unsafe extern "C" fn full_merge_callback(
raw_cb: *mut c_void, raw_cb: *mut c_void,
raw_key: *const c_char, raw_key: *const c_char,
key_len: size_t, key_len: size_t,
existing_value: *const c_char, existing_value: *const c_char,
existing_value_len: size_t, existing_value_len: size_t,
operands_list: *const *const c_char, operands_list: *const *const c_char,
operands_list_len: *const size_t, operands_list_len: *const size_t,
num_operands: c_int, num_operands: c_int,
success: *mut u8, success: *mut u8,
new_value_length: *mut size_t, new_value_length: *mut size_t,
) -> *mut c_char { ) -> *mut c_char {
let cb = &mut *(raw_cb as *mut MergeOperatorCallback); let cb = &mut *(raw_cb as *mut MergeOperatorCallback);
let operands = &mut MergeOperands::new(operands_list, operands_list_len, num_operands); let operands = &mut MergeOperands::new(operands_list, operands_list_len, num_operands);
let key = slice::from_raw_parts(raw_key as *const u8, key_len as usize); let key = slice::from_raw_parts(raw_key as *const u8, key_len as usize);
let oldval = let oldval = if existing_value == ptr::null() {
if existing_value == ptr::null() { None
None } else {
} else { Some(slice::from_raw_parts(
Some(slice::from_raw_parts(existing_value as *const u8, existing_value_len as usize)) existing_value as *const u8,
}; existing_value_len as usize,
if let Some(mut result) = (cb.full_merge_fn)(key, oldval, operands) { ))
result.shrink_to_fit(); };
// TODO(tan) investigate zero-copy techniques to improve performance if let Some(mut result) = (cb.full_merge_fn)(key, oldval, operands) {
let buf = libc::malloc(result.len() as size_t); result.shrink_to_fit();
assert!(!buf.is_null()); // TODO(tan) investigate zero-copy techniques to improve performance
*new_value_length = result.len() as size_t; let buf = libc::malloc(result.len() as size_t);
*success = 1 as u8; assert!(!buf.is_null());
ptr::copy(result.as_ptr() as *mut c_void, &mut *buf, result.len()); *new_value_length = result.len() as size_t;
buf as *mut c_char *success = 1 as u8;
} else { ptr::copy(result.as_ptr() as *mut c_void, &mut *buf, result.len());
*success = 0 as u8; buf as *mut c_char
ptr::null_mut() as *mut c_char } else {
} *success = 0 as u8;
ptr::null_mut() as *mut c_char
}
} }
pub unsafe extern "C" fn partial_merge_callback( pub unsafe extern "C" fn partial_merge_callback(
raw_cb: *mut c_void, raw_cb: *mut c_void,
raw_key: *const c_char, raw_key: *const c_char,
key_len: size_t, key_len: size_t,
operands_list: *const *const c_char, operands_list: *const *const c_char,
operands_list_len: *const size_t, operands_list_len: *const size_t,
num_operands: c_int, num_operands: c_int,
success: *mut u8, success: *mut u8,
new_value_length: *mut size_t, new_value_length: *mut size_t,
) -> *mut c_char { ) -> *mut c_char {
let cb = &mut *(raw_cb as *mut MergeOperatorCallback); let cb = &mut *(raw_cb as *mut MergeOperatorCallback);
let operands = &mut MergeOperands::new(operands_list, operands_list_len, num_operands); let operands = &mut MergeOperands::new(operands_list, operands_list_len, num_operands);
let key = slice::from_raw_parts(raw_key as *const u8, key_len as usize); let key = slice::from_raw_parts(raw_key as *const u8, key_len as usize);
if let Some(mut result) = (cb.partial_merge_fn)(key, None, operands) { if let Some(mut result) = (cb.partial_merge_fn)(key, None, operands) {
result.shrink_to_fit(); result.shrink_to_fit();
// TODO(tan) investigate zero-copy techniques to improve performance // TODO(tan) investigate zero-copy techniques to improve performance
let buf = libc::malloc(result.len() as size_t); let buf = libc::malloc(result.len() as size_t);
assert!(!buf.is_null()); assert!(!buf.is_null());
*new_value_length = result.len() as size_t; *new_value_length = result.len() as size_t;
*success = 1 as u8; *success = 1 as u8;
ptr::copy(result.as_ptr() as *mut c_void, &mut *buf, result.len()); ptr::copy(result.as_ptr() as *mut c_void, &mut *buf, result.len());
buf as *mut c_char buf as *mut c_char
} else { } else {
*success = 0 as u8; *success = 0 as u8;
ptr::null_mut::<c_char>() ptr::null_mut::<c_char>()
} }
} }
pub struct MergeOperands { pub struct MergeOperands {
operands_list: *const *const c_char, operands_list: *const *const c_char,
operands_list_len: *const size_t, operands_list_len: *const size_t,
num_operands: usize, num_operands: usize,
cursor: usize, cursor: usize,
} }
impl MergeOperands { impl MergeOperands {
fn new( fn new(
operands_list: *const *const c_char, operands_list: *const *const c_char,
operands_list_len: *const size_t, operands_list_len: *const size_t,
num_operands: c_int, num_operands: c_int,
) -> MergeOperands { ) -> MergeOperands {
assert!(num_operands >= 0); assert!(num_operands >= 0);
MergeOperands { MergeOperands {
operands_list: operands_list, operands_list: operands_list,
operands_list_len: operands_list_len, operands_list_len: operands_list_len,
num_operands: num_operands as usize, num_operands: num_operands as usize,
cursor: 0, cursor: 0,
} }
} }
} }
impl<'a> Iterator for &'a mut MergeOperands { impl<'a> Iterator for &'a mut MergeOperands {
type Item = &'a [u8]; type Item = &'a [u8];
fn next(&mut self) -> Option<&'a [u8]> { fn next(&mut self) -> Option<&'a [u8]> {
if self.cursor == self.num_operands { if self.cursor == self.num_operands {
None None
} else { } else {
unsafe { unsafe {
let base = self.operands_list as usize; let base = self.operands_list as usize;
let base_len = self.operands_list_len as usize; let base_len = self.operands_list_len as usize;
let spacing = mem::size_of::<*const *const u8>(); let spacing = mem::size_of::<*const *const u8>();
let spacing_len = mem::size_of::<*const size_t>(); let spacing_len = mem::size_of::<*const size_t>();
let len_ptr = (base_len + (spacing_len * self.cursor)) as *const size_t; let len_ptr = (base_len + (spacing_len * self.cursor)) as *const size_t;
let len = *len_ptr as usize; let len = *len_ptr as usize;
let ptr = base + (spacing * self.cursor); let ptr = base + (spacing * self.cursor);
self.cursor += 1; self.cursor += 1;
Some(mem::transmute(slice::from_raw_parts( Some(mem::transmute(slice::from_raw_parts(
*(ptr as *const *const u8) as *const u8, *(ptr as *const *const u8) as *const u8,
len, len,
))) )))
} }
} }
} }
fn size_hint(&self) -> (usize, Option<usize>) { fn size_hint(&self) -> (usize, Option<usize>) {
let remaining = self.num_operands - self.cursor; let remaining = self.num_operands - self.cursor;
(remaining, Some(remaining)) (remaining, Some(remaining))
} }
} }
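For readers skimming the reformatted iterator above: the C layer hands the merge callback two parallel arrays of equal length, one of value pointers (operands_list) and one of value lengths (operands_list_len), and next() reads entry `cursor` from each by offsetting both base pointers. A minimal sketch of that indexing, written with pointer::add instead of manual usize arithmetic; operand_at is a hypothetical helper, not part of this crate or this diff:

    use libc::{c_char, size_t};
    use std::slice;

    /// Borrow the `index`-th operand from the two parallel arrays, without copying.
    unsafe fn operand_at<'a>(
        operands_list: *const *const c_char,
        operands_list_len: *const size_t,
        index: usize,
    ) -> &'a [u8] {
        let data = *operands_list.add(index) as *const u8; // index-th value pointer
        let len = *operands_list_len.add(index) as usize;  // index-th value length
        slice::from_raw_parts(data, len)
    }
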
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;
fn test_provided_merge( fn test_provided_merge(
_new_key: &[u8], _new_key: &[u8],
existing_val: Option<&[u8]>, existing_val: Option<&[u8]>,
operands: &mut MergeOperands, operands: &mut MergeOperands,
) -> Option<Vec<u8>> { ) -> Option<Vec<u8>> {
let nops = operands.size_hint().0; let nops = operands.size_hint().0;
let mut result: Vec<u8> = Vec::with_capacity(nops); let mut result: Vec<u8> = Vec::with_capacity(nops);
if let Some(v) = existing_val { if let Some(v) = existing_val {
for e in v { for e in v {
result.push(*e); result.push(*e);
} }
} }
for op in operands { for op in operands {
for e in op { for e in op {
result.push(*e); result.push(*e);
} }
} }
Some(result) Some(result)
} }
#[test] #[test]
fn mergetest() { fn mergetest() {
use {DB, Options}; use {Options, DB};
let path = "_rust_rocksdb_mergetest"; let path = "_rust_rocksdb_mergetest";
let mut opts = Options::default(); let mut opts = Options::default();
opts.create_if_missing(true); opts.create_if_missing(true);
opts.set_merge_operator("test operator", test_provided_merge, None); opts.set_merge_operator("test operator", test_provided_merge, None);
{ {
let db = DB::open(&opts, path).unwrap(); let db = DB::open(&opts, path).unwrap();
let p = db.put(b"k1", b"a"); let p = db.put(b"k1", b"a");
assert!(p.is_ok()); assert!(p.is_ok());
let _ = db.merge(b"k1", b"b"); let _ = db.merge(b"k1", b"b");
let _ = db.merge(b"k1", b"c"); let _ = db.merge(b"k1", b"c");
let _ = db.merge(b"k1", b"d"); let _ = db.merge(b"k1", b"d");
let _ = db.merge(b"k1", b"efg"); let _ = db.merge(b"k1", b"efg");
let m = db.merge(b"k1", b"h"); let m = db.merge(b"k1", b"h");
assert!(m.is_ok()); assert!(m.is_ok());
match db.get(b"k1") { match db.get(b"k1") {
Ok(Some(value)) => { Ok(Some(value)) => match value.to_utf8() {
match value.to_utf8() { Some(v) => println!("retrieved utf8 value: {}", v),
Some(v) => println!("retrieved utf8 value: {}", v), None => println!("did not read valid utf-8 out of the db"),
None => println!("did not read valid utf-8 out of the db"), },
} Err(_) => println!("error reading value"),
} _ => panic!("value not present"),
Err(_) => println!("error reading value"), }
_ => panic!("value not present"),
} assert!(m.is_ok());
let r = db.get(b"k1");
assert!(m.is_ok()); assert!(r.unwrap().unwrap().to_utf8().unwrap() == "abcdefgh");
let r = db.get(b"k1"); assert!(db.delete(b"k1").is_ok());
assert!(r.unwrap().unwrap().to_utf8().unwrap() == "abcdefgh"); assert!(db.get(b"k1").unwrap().is_none());
assert!(db.delete(b"k1").is_ok()); }
assert!(db.get(b"k1").unwrap().is_none()); assert!(DB::destroy(&opts, path).is_ok());
} }
assert!(DB::destroy(&opts, path).is_ok());
} unsafe fn to_slice<T: Sized>(p: &T) -> &[u8] {
::std::slice::from_raw_parts((p as *const T) as *const u8, ::std::mem::size_of::<T>())
unsafe fn to_slice<T: Sized>(p: &T) -> &[u8] { }
::std::slice::from_raw_parts(
(p as *const T) as *const u8, fn from_slice<T: Sized>(s: &[u8]) -> Option<&T> {
::std::mem::size_of::<T>(), if ::std::mem::size_of::<T>() != s.len() {
) println!(
} "slice {:?} is len {}, but T is size {}",
s,
fn from_slice<T: Sized>(s: &[u8]) -> Option<&T> { s.len(),
if ::std::mem::size_of::<T>() != s.len() { ::std::mem::size_of::<T>()
println!("slice {:?} is len {}, but T is size {}", s, s.len(), ::std::mem::size_of::<T>()); );
None None
} else { } else {
unsafe { unsafe { Some(::std::mem::transmute(s.as_ptr())) }
Some(::std::mem::transmute(s.as_ptr())) }
} }
}
} #[repr(packed)]
#[derive(Copy, Clone, Debug)]
#[repr(packed)] struct ValueCounts {
num_a: u32,
#[derive(Copy, Clone, Debug)] num_b: u32,
struct ValueCounts { num_c: u32,
num_a: u32, num_d: u32,
num_b: u32, }
num_c: u32,
num_d: u32, fn test_counting_partial_merge(
} _new_key: &[u8],
_existing_val: Option<&[u8]>,
fn test_counting_partial_merge( operands: &mut MergeOperands,
_new_key: &[u8], ) -> Option<Vec<u8>> {
_existing_val: Option<&[u8]>, let nops = operands.size_hint().0;
operands: &mut MergeOperands, let mut result: Vec<u8> = Vec::with_capacity(nops);
) -> Option<Vec<u8>> { for op in operands {
let nops = operands.size_hint().0; for e in op {
let mut result: Vec<u8> = Vec::with_capacity(nops); result.push(*e);
for op in operands { }
for e in op { }
result.push(*e); Some(result)
} }
}
Some(result) fn test_counting_full_merge(
} _new_key: &[u8],
existing_val: Option<&[u8]>,
fn test_counting_full_merge( operands: &mut MergeOperands,
_new_key: &[u8], ) -> Option<Vec<u8>> {
existing_val: Option<&[u8]>, let mut counts: ValueCounts = if let Some(v) = existing_val {
operands: &mut MergeOperands, from_slice::<ValueCounts>(v).unwrap().clone()
) -> Option<Vec<u8>> { } else {
ValueCounts {
let mut counts : ValueCounts = num_a: 0,
if let Some(v) = existing_val { num_b: 0,
from_slice::<ValueCounts>(v).unwrap().clone() num_c: 0,
} else { num_d: 0,
ValueCounts { }
num_a: 0, };
num_b: 0,
num_c: 0, for op in operands {
num_d: 0 } for e in op {
}; match *e {
b'a' => counts.num_a += 1,
for op in operands { b'b' => counts.num_b += 1,
for e in op { b'c' => counts.num_c += 1,
match *e { b'd' => counts.num_d += 1,
b'a' => counts.num_a += 1, _ => {}
b'b' => counts.num_b += 1, }
b'c' => counts.num_c += 1, }
b'd' => counts.num_d += 1, }
_ => {} let slc = unsafe { to_slice(&counts) };
} Some(slc.to_vec())
} }
}
let slc = unsafe { to_slice(&counts) }; #[test]
Some(slc.to_vec()) fn counting_mergetest() {
} use std::sync::Arc;
use std::thread;
#[test] use {DBCompactionStyle, Options, DB};
fn counting_mergetest() {
use std::thread; let path = "_rust_rocksdb_partial_mergetest";
use std::sync::Arc; let mut opts = Options::default();
use {DB, Options, DBCompactionStyle}; opts.create_if_missing(true);
opts.set_compaction_style(DBCompactionStyle::Universal);
let path = "_rust_rocksdb_partial_mergetest"; opts.set_min_write_buffer_number_to_merge(10);
let mut opts = Options::default();
opts.create_if_missing(true); opts.set_merge_operator(
opts.set_compaction_style(DBCompactionStyle::Universal); "sort operator",
opts.set_min_write_buffer_number_to_merge(10); test_counting_full_merge,
Some(test_counting_partial_merge),
opts.set_merge_operator("sort operator", test_counting_full_merge, Some(test_counting_partial_merge)); );
{ {
let db = Arc::new(DB::open(&opts, path).unwrap()); let db = Arc::new(DB::open(&opts, path).unwrap());
let _ = db.delete(b"k1"); let _ = db.delete(b"k1");
let _ = db.delete(b"k2"); let _ = db.delete(b"k2");
let _ = db.merge(b"k1", b"a"); let _ = db.merge(b"k1", b"a");
let _ = db.merge(b"k1", b"b"); let _ = db.merge(b"k1", b"b");
let _ = db.merge(b"k1", b"d"); let _ = db.merge(b"k1", b"d");
let _ = db.merge(b"k1", b"a"); let _ = db.merge(b"k1", b"a");
let _ = db.merge(b"k1", b"a"); let _ = db.merge(b"k1", b"a");
let _ = db.merge(b"k1", b"efg"); let _ = db.merge(b"k1", b"efg");
for i in 0..500 { for i in 0..500 {
let _ = db.merge(b"k2", b"c"); let _ = db.merge(b"k2", b"c");
if i % 20 == 0 { if i % 20 == 0 {
let _ = db.get(b"k2"); let _ = db.get(b"k2");
} }
} }
for i in 0..500 { for i in 0..500 {
let _ = db.merge(b"k2", b"c"); let _ = db.merge(b"k2", b"c");
if i % 20 == 0 { if i % 20 == 0 {
let _ = db.get(b"k2"); let _ = db.get(b"k2");
} }
} }
db.compact_range(None, None); db.compact_range(None, None);
let d1 = db.clone(); let d1 = db.clone();
let d2 = db.clone(); let d2 = db.clone();
let d3 = db.clone(); let d3 = db.clone();
let h1 = thread::spawn(move || { let h1 = thread::spawn(move || {
for i in 0..500 { for i in 0..500 {
let _ = d1.merge(b"k2", b"c"); let _ = d1.merge(b"k2", b"c");
if i % 20 == 0 { if i % 20 == 0 {
let _ = d1.get(b"k2"); let _ = d1.get(b"k2");
} }
} }
for i in 0..500 { for i in 0..500 {
let _ = d1.merge(b"k2", b"a"); let _ = d1.merge(b"k2", b"a");
if i % 20 == 0 { if i % 20 == 0 {
let _ = d1.get(b"k2"); let _ = d1.get(b"k2");
} }
} }
}); });
let h2 = thread::spawn(move || { let h2 = thread::spawn(move || {
for i in 0..500 { for i in 0..500 {
let _ = d2.merge(b"k2", b"b"); let _ = d2.merge(b"k2", b"b");
if i % 20 == 0 { if i % 20 == 0 {
let _ = d2.get(b"k2"); let _ = d2.get(b"k2");
} }
} }
for i in 0..500 { for i in 0..500 {
let _ = d2.merge(b"k2", b"d"); let _ = d2.merge(b"k2", b"d");
if i % 20 == 0 { if i % 20 == 0 {
let _ = d2.get(b"k2"); let _ = d2.get(b"k2");
} }
} }
d2.compact_range(None, None); d2.compact_range(None, None);
}); });
h2.join().unwrap(); h2.join().unwrap();
let h3 = thread::spawn(move || { let h3 = thread::spawn(move || {
for i in 0..500 { for i in 0..500 {
let _ = d3.merge(b"k2", b"a"); let _ = d3.merge(b"k2", b"a");
if i % 20 == 0 { if i % 20 == 0 {
let _ = d3.get(b"k2"); let _ = d3.get(b"k2");
} }
} }
for i in 0..500 { for i in 0..500 {
let _ = d3.merge(b"k2", b"c"); let _ = d3.merge(b"k2", b"c");
if i % 20 == 0 { if i % 20 == 0 {
let _ = d3.get(b"k2"); let _ = d3.get(b"k2");
} }
} }
}); });
let m = db.merge(b"k1", b"b"); let m = db.merge(b"k1", b"b");
assert!(m.is_ok()); assert!(m.is_ok());
h3.join().unwrap(); h3.join().unwrap();
h1.join().unwrap(); h1.join().unwrap();
match db.get(b"k2") { match db.get(b"k2") {
Ok(Some(value)) => { Ok(Some(value)) => match from_slice::<ValueCounts>(&*value) {
match from_slice::<ValueCounts>(&*value) { Some(v) => unsafe {
Some(v) => unsafe { assert_eq!(v.num_a, 1000);
assert_eq!(v.num_a, 1000); assert_eq!(v.num_b, 500);
assert_eq!(v.num_b, 500); assert_eq!(v.num_c, 2000);
assert_eq!(v.num_c, 2000); assert_eq!(v.num_d, 500);
assert_eq!(v.num_d, 500); },
}, None => panic!("Failed to get ValueCounts from db"),
None => panic!("Failed to get ValueCounts from db"), },
} Err(e) => panic!("error reading value {:?}", e),
} _ => panic!("value not present"),
Err(e) => panic!("error reading value {:?}", e), }
_ => panic!("value not present"), match db.get(b"k1") {
} Ok(Some(value)) => match from_slice::<ValueCounts>(&*value) {
match db.get(b"k1") { Some(v) => unsafe {
Ok(Some(value)) => { assert_eq!(v.num_a, 3);
match from_slice::<ValueCounts>(&*value) { assert_eq!(v.num_b, 2);
Some(v) => unsafe { assert_eq!(v.num_c, 0);
assert_eq!(v.num_a, 3); assert_eq!(v.num_d, 1);
assert_eq!(v.num_b, 2); },
assert_eq!(v.num_c, 0); None => panic!("Failed to get ValueCounts from db"),
assert_eq!(v.num_d, 1); },
}, Err(e) => panic!("error reading value {:?}", e),
None => panic!("Failed to get ValueCounts from db"), _ => panic!("value not present"),
} }
} }
Err(e) => panic!("error reading value {:?}", e), assert!(DB::destroy(&opts, path).is_ok());
_ => panic!("value not present"), }
}
}
assert!(DB::destroy(&opts, path).is_ok());
}
} }

@ -40,7 +40,7 @@ impl SliceTransform {
name: &str, name: &str,
transform_fn: TransformFn, transform_fn: TransformFn,
in_domain_fn: Option<InDomainFn>, in_domain_fn: Option<InDomainFn>,
) -> SliceTransform{ ) -> SliceTransform {
let cb = Box::new(TransformCallback { let cb = Box::new(TransformCallback {
name: CString::new(name.as_bytes()).unwrap(), name: CString::new(name.as_bytes()).unwrap(),
transform_fn: transform_fn, transform_fn: transform_fn,
@ -48,11 +48,10 @@ impl SliceTransform {
}); });
let st = unsafe { let st = unsafe {
ffi::rocksdb_slicetransform_create( ffi::rocksdb_slicetransform_create(
mem::transmute(cb), mem::transmute(cb),
Some(slice_transform_destructor_callback), Some(slice_transform_destructor_callback),
Some(transform_callback), Some(transform_callback),
// this is ugly, but I can't get the compiler // this is ugly, but I can't get the compiler
// not to barf with "expected fn pointer, found fn item" // not to barf with "expected fn pointer, found fn item"
// without this. sorry. // without this. sorry.
@ -61,31 +60,24 @@ impl SliceTransform {
} else { } else {
None None
}, },
// this None points to the deprecated InRange callback // this None points to the deprecated InRange callback
None, None,
Some(slice_transform_name_callback), Some(slice_transform_name_callback),
) )
}; };
SliceTransform { SliceTransform { inner: st }
inner: st
}
} }
pub fn create_fixed_prefix(len: size_t) -> SliceTransform { pub fn create_fixed_prefix(len: size_t) -> SliceTransform {
SliceTransform { SliceTransform {
inner: unsafe { inner: unsafe { ffi::rocksdb_slicetransform_create_fixed_prefix(len) },
ffi::rocksdb_slicetransform_create_fixed_prefix(len)
},
} }
} }
pub fn create_noop() -> SliceTransform { pub fn create_noop() -> SliceTransform {
SliceTransform { SliceTransform {
inner: unsafe { inner: unsafe { ffi::rocksdb_slicetransform_create_noop() },
ffi::rocksdb_slicetransform_create_noop()
},
} }
} }
} }
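As context for the constructors above, a usage sketch of a fixed-prefix transform; it is not part of this diff and assumes Options::set_prefix_extractor and DB::prefix_iterator as exercised by the tests further down in this change:

    extern crate rocksdb;
    use rocksdb::{Options, SliceTransform, DB};

    fn prefix_demo() {
        let mut opts = Options::default();
        opts.create_if_missing(true);
        // Treat the first three bytes of every key as its prefix.
        opts.set_prefix_extractor(SliceTransform::create_fixed_prefix(3));
        let db = DB::open(&opts, "_rust_rocksdb_prefix_demo").unwrap();
        db.put(b"aaa1", b"1").unwrap();
        db.put(b"aaa2", b"2").unwrap();
        db.put(b"bbb1", b"3").unwrap();
        // Only keys sharing the "aaa" prefix are visited.
        for (k, v) in db.prefix_iterator(b"aaa") {
            println!("{:?} = {:?}", k, v);
        }
    }
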
@ -94,34 +86,30 @@ pub type TransformFn = fn(&[u8]) -> Vec<u8>;
pub type InDomainFn = fn(&[u8]) -> bool; pub type InDomainFn = fn(&[u8]) -> bool;
pub struct TransformCallback { pub struct TransformCallback {
pub name: CString, pub name: CString,
pub transform_fn: TransformFn, pub transform_fn: TransformFn,
pub in_domain_fn: Option<InDomainFn>, pub in_domain_fn: Option<InDomainFn>,
} }
pub unsafe extern "C" fn slice_transform_destructor_callback( pub unsafe extern "C" fn slice_transform_destructor_callback(raw_cb: *mut c_void) {
raw_cb: *mut c_void let transform: Box<TransformCallback> = mem::transmute(raw_cb);
) { drop(transform);
let transform: Box<TransformCallback> = mem::transmute(raw_cb);
drop(transform);
} }
pub unsafe extern "C" fn slice_transform_name_callback( pub unsafe extern "C" fn slice_transform_name_callback(raw_cb: *mut c_void) -> *const c_char {
raw_cb: *mut c_void let cb = &mut *(raw_cb as *mut TransformCallback);
) -> *const c_char { cb.name.as_ptr()
let cb = &mut *(raw_cb as *mut TransformCallback);
cb.name.as_ptr()
} }
pub unsafe extern "C" fn transform_callback( pub unsafe extern "C" fn transform_callback(
raw_cb: *mut c_void, raw_cb: *mut c_void,
raw_key: *const c_char, raw_key: *const c_char,
key_len: size_t, key_len: size_t,
dst_length: *mut size_t, dst_length: *mut size_t,
) -> *mut c_char { ) -> *mut c_char {
let cb = &mut *(raw_cb as *mut TransformCallback); let cb = &mut *(raw_cb as *mut TransformCallback);
let key = slice::from_raw_parts(raw_key as *const u8, key_len as usize); let key = slice::from_raw_parts(raw_key as *const u8, key_len as usize);
let mut result = (cb.transform_fn)(key); let mut result = (cb.transform_fn)(key);
result.shrink_to_fit(); result.shrink_to_fit();
// copy the result into a C++ destroyable buffer // copy the result into a C++ destroyable buffer
@ -135,11 +123,11 @@ pub unsafe extern "C" fn transform_callback(
pub unsafe extern "C" fn in_domain_callback( pub unsafe extern "C" fn in_domain_callback(
raw_cb: *mut c_void, raw_cb: *mut c_void,
raw_key: *const c_char, raw_key: *const c_char,
key_len: size_t, key_len: size_t,
) -> u8 { ) -> u8 {
let cb = &mut *(raw_cb as *mut TransformCallback); let cb = &mut *(raw_cb as *mut TransformCallback);
let key = slice::from_raw_parts(raw_key as *const u8, key_len as usize); let key = slice::from_raw_parts(raw_key as *const u8, key_len as usize);
if (cb.in_domain_fn.unwrap())(key) { if (cb.in_domain_fn.unwrap())(key) {
1 1

@ -14,7 +14,7 @@
// //
extern crate rocksdb; extern crate rocksdb;
use rocksdb::{checkpoint::Checkpoint, DB, Options}; use rocksdb::{checkpoint::Checkpoint, Options, DB};
use std::fs::remove_dir_all; use std::fs::remove_dir_all;
#[test] #[test]

@ -15,7 +15,7 @@
extern crate rocksdb; extern crate rocksdb;
mod util; mod util;
use rocksdb::{DB, MergeOperands, Options, ColumnFamilyDescriptor}; use rocksdb::{ColumnFamilyDescriptor, MergeOperands, Options, DB};
use util::DBPath; use util::DBPath;
#[test] #[test]
@ -42,16 +42,15 @@ pub fn test_column_family() {
let mut opts = Options::default(); let mut opts = Options::default();
opts.set_merge_operator("test operator", test_provided_merge, None); opts.set_merge_operator("test operator", test_provided_merge, None);
match DB::open(&opts, &n) { match DB::open(&opts, &n) {
Ok(_db) => { Ok(_db) => panic!(
panic!("should not have opened DB successfully without \ "should not have opened DB successfully without \
specifying column specifying column
families") families"
} ),
Err(e) => { Err(e) => assert!(e.to_string().starts_with(
assert!(e.to_string() "Invalid argument: You have to open all \
.starts_with("Invalid argument: You have to open all \ column families."
column families.")) )),
}
} }
} }
@ -76,11 +75,9 @@ pub fn test_column_family() {
} }
// TODO should be able to use writebatch ops with a cf // TODO should be able to use writebatch ops with a cf
{ {}
}
// TODO should be able to iterate over a cf // TODO should be able to iterate over a cf
{ {}
}
// should be able to drop a cf // should be able to drop a cf
{ {
let mut db = DB::open_cf(&Options::default(), &n, &["cf1"]).unwrap(); let mut db = DB::open_cf(&Options::default(), &n, &["cf1"]).unwrap();
@ -136,12 +133,10 @@ fn test_merge_operator() {
println!("m is {:?}", m); println!("m is {:?}", m);
// TODO assert!(m.is_ok()); // TODO assert!(m.is_ok());
match db.get(b"k1") { match db.get(b"k1") {
Ok(Some(value)) => { Ok(Some(value)) => match value.to_utf8() {
match value.to_utf8() { Some(v) => println!("retrieved utf8 value: {}", v),
Some(v) => println!("retrieved utf8 value: {}", v), None => println!("did not read valid utf-8 out of the db"),
None => println!("did not read valid utf-8 out of the db"), },
}
}
Err(_) => println!("error reading value"), Err(_) => println!("error reading value"),
_ => panic!("value not present!"), _ => panic!("value not present!"),
} }
@ -151,13 +146,13 @@ fn test_merge_operator() {
assert!(db.delete(b"k1").is_ok()); assert!(db.delete(b"k1").is_ok());
assert!(db.get(b"k1").unwrap().is_none()); assert!(db.get(b"k1").unwrap().is_none());
} }
} }
fn test_provided_merge(_: &[u8], fn test_provided_merge(
existing_val: Option<&[u8]>, _: &[u8],
operands: &mut MergeOperands) existing_val: Option<&[u8]>,
-> Option<Vec<u8>> { operands: &mut MergeOperands,
) -> Option<Vec<u8>> {
let nops = operands.size_hint().0; let nops = operands.size_hint().0;
let mut result: Vec<u8> = Vec::with_capacity(nops); let mut result: Vec<u8> = Vec::with_capacity(nops);
match existing_val { match existing_val {
@ -192,7 +187,10 @@ pub fn test_column_family_with_options() {
match DB::open_cf_descriptors(&opts, &n, cfs) { match DB::open_cf_descriptors(&opts, &n, cfs) {
Ok(_db) => println!("created db with column family descriptors succesfully"), Ok(_db) => println!("created db with column family descriptors succesfully"),
Err(e) => { Err(e) => {
panic!("could not create new database with column family descriptors: {}", e); panic!(
"could not create new database with column family descriptors: {}",
e
);
} }
} }
} }
@ -208,7 +206,10 @@ pub fn test_column_family_with_options() {
match DB::open_cf_descriptors(&opts, &n, cfs) { match DB::open_cf_descriptors(&opts, &n, cfs) {
Ok(_db) => println!("succesfully re-opened database with column family descriptors"), Ok(_db) => println!("succesfully re-opened database with column family descriptors"),
Err(e) => { Err(e) => {
panic!("unable to re-open database with column family descriptors: {}", e); panic!(
"unable to re-open database with column family descriptors: {}",
e
);
} }
} }
} }

@ -15,7 +15,7 @@
extern crate rocksdb; extern crate rocksdb;
mod util; mod util;
use rocksdb::{DB, Direction, IteratorMode, MemtableFactory, Options}; use rocksdb::{Direction, IteratorMode, MemtableFactory, Options, DB};
use util::DBPath; use util::DBPath;
fn cba(input: &Box<[u8]>) -> Box<[u8]> { fn cba(input: &Box<[u8]>) -> Box<[u8]> {
@ -41,7 +41,11 @@ pub fn test_iterator() {
assert!(p.is_ok()); assert!(p.is_ok());
let p = db.put(&*k3, &*v3); let p = db.put(&*k3, &*v3);
assert!(p.is_ok()); assert!(p.is_ok());
let expected = vec![(cba(&k1), cba(&v1)), (cba(&k2), cba(&v2)), (cba(&k3), cba(&v3))]; let expected = vec![
(cba(&k1), cba(&v1)),
(cba(&k2), cba(&v2)),
(cba(&k3), cba(&v3)),
];
{ {
let iterator1 = db.iterator(IteratorMode::Start); let iterator1 = db.iterator(IteratorMode::Start);
assert_eq!(iterator1.collect::<Vec<_>>(), expected); assert_eq!(iterator1.collect::<Vec<_>>(), expected);
@ -103,10 +107,12 @@ pub fn test_iterator() {
let old_iterator = db.iterator(IteratorMode::Start); let old_iterator = db.iterator(IteratorMode::Start);
let p = db.put(&*k4, &*v4); let p = db.put(&*k4, &*v4);
assert!(p.is_ok()); assert!(p.is_ok());
let expected2 = vec![(cba(&k1), cba(&v1)), let expected2 = vec![
(cba(&k2), cba(&v2)), (cba(&k1), cba(&v1)),
(cba(&k3), cba(&v3)), (cba(&k2), cba(&v2)),
(cba(&k4), cba(&v4))]; (cba(&k3), cba(&v3)),
(cba(&k4), cba(&v4)),
];
{ {
assert_eq!(old_iterator.collect::<Vec<_>>(), expected); assert_eq!(old_iterator.collect::<Vec<_>>(), expected);
} }
@ -116,7 +122,11 @@ pub fn test_iterator() {
} }
{ {
let iterator1 = db.iterator(IteratorMode::From(b"k2", Direction::Forward)); let iterator1 = db.iterator(IteratorMode::From(b"k2", Direction::Forward));
let expected = vec![(cba(&k2), cba(&v2)), (cba(&k3), cba(&v3)), (cba(&k4), cba(&v4))]; let expected = vec![
(cba(&k2), cba(&v2)),
(cba(&k3), cba(&v3)),
(cba(&k4), cba(&v4)),
];
assert_eq!(iterator1.collect::<Vec<_>>(), expected); assert_eq!(iterator1.collect::<Vec<_>>(), expected);
} }
{ {
@ -157,7 +167,9 @@ pub fn test_iterator() {
} }
} }
fn key(k: &[u8]) -> Box<[u8]> { k.to_vec().into_boxed_slice() } fn key(k: &[u8]) -> Box<[u8]> {
k.to_vec().into_boxed_slice()
}
#[test] #[test]
pub fn test_prefix_iterator() { pub fn test_prefix_iterator() {

@ -16,11 +16,10 @@ extern crate rocksdb;
mod util; mod util;
use rocksdb::DB; use rocksdb::DB;
use std::thread;
use std::sync::Arc; use std::sync::Arc;
use std::thread;
use util::DBPath; use util::DBPath;
const N: usize = 100_000; const N: usize = 100_000;
#[test] #[test]

@ -41,9 +41,9 @@ pub fn test_forwards_iteration() {
assert_eq!(iter.key(), Some(b"k2".to_vec())); assert_eq!(iter.key(), Some(b"k2".to_vec()));
assert_eq!(iter.value(), Some(b"v2".to_vec())); assert_eq!(iter.value(), Some(b"v2".to_vec()));
iter.next(); // k3 iter.next(); // k3
iter.next(); // k4 iter.next(); // k4
iter.next(); // invalid! iter.next(); // invalid!
assert_eq!(iter.valid(), false); assert_eq!(iter.valid(), false);
assert_eq!(iter.key(), None); assert_eq!(iter.key(), None);
@ -51,7 +51,6 @@ pub fn test_forwards_iteration() {
} }
} }
#[test] #[test]
pub fn test_seek_last() { pub fn test_seek_last() {
let n = DBPath::new("backwards_iteration"); let n = DBPath::new("backwards_iteration");
@ -75,9 +74,9 @@ pub fn test_seek_last() {
assert_eq!(iter.key(), Some(b"k3".to_vec())); assert_eq!(iter.key(), Some(b"k3".to_vec()));
assert_eq!(iter.value(), Some(b"v3".to_vec())); assert_eq!(iter.value(), Some(b"v3".to_vec()));
iter.prev(); // k2 iter.prev(); // k2
iter.prev(); // k1 iter.prev(); // k1
iter.prev(); // invalid! iter.prev(); // invalid!
assert_eq!(iter.valid(), false); assert_eq!(iter.valid(), false);
assert_eq!(iter.key(), None); assert_eq!(iter.key(), None);
@ -85,7 +84,6 @@ pub fn test_seek_last() {
} }
} }
#[test] #[test]
pub fn test_seek() { pub fn test_seek() {
let n = DBPath::new("seek"); let n = DBPath::new("seek");
@ -111,7 +109,6 @@ pub fn test_seek() {
} }
} }
#[test] #[test]
pub fn test_seek_to_nonexistant() { pub fn test_seek_to_nonexistant() {
let n = DBPath::new("seek_to_nonexistant"); let n = DBPath::new("seek_to_nonexistant");

@ -15,7 +15,7 @@
extern crate rocksdb; extern crate rocksdb;
mod util; mod util;
use rocksdb::{DB, Options}; use rocksdb::{Options, DB};
use util::DBPath; use util::DBPath;
#[test] #[test]

@ -1,7 +1,7 @@
extern crate rocksdb; extern crate rocksdb;
mod util; mod util;
use rocksdb::{DB, Options, SliceTransform}; use rocksdb::{Options, SliceTransform, DB};
use util::DBPath; use util::DBPath;
#[test] #[test]
@ -34,7 +34,9 @@ pub fn test_slice_transform() {
input.iter().cloned().collect::<Vec<_>>().into_boxed_slice() input.iter().cloned().collect::<Vec<_>>().into_boxed_slice()
} }
fn key(k: &[u8]) -> Box<[u8]> { k.to_vec().into_boxed_slice() } fn key(k: &[u8]) -> Box<[u8]> {
k.to_vec().into_boxed_slice()
}
{ {
let expected = vec![(cba(&a1), cba(&a1)), (cba(&a2), cba(&a2))]; let expected = vec![(cba(&a1), cba(&a1)), (cba(&a2), cba(&a2))];

@ -1,13 +1,13 @@
extern crate rocksdb; extern crate rocksdb;
use std::path::{Path, PathBuf};
use std::time::{SystemTime, UNIX_EPOCH}; use std::time::{SystemTime, UNIX_EPOCH};
use std::path::{PathBuf, Path};
use rocksdb::{DB, Options}; use rocksdb::{Options, DB};
/// Ensures that DB::destroy is called for this database when DBPath is dropped. /// Ensures that DB::destroy is called for this database when DBPath is dropped.
pub struct DBPath { pub struct DBPath {
path: PathBuf path: PathBuf,
} }
impl DBPath { impl DBPath {
@ -22,7 +22,9 @@ impl DBPath {
current_time.subsec_nanos() current_time.subsec_nanos()
); );
DBPath { path: PathBuf::from(path) } DBPath {
path: PathBuf::from(path),
}
} }
} }
@ -38,4 +40,3 @@ impl AsRef<Path> for DBPath {
&self.path &self.path
} }
} }
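To round out the section: the tests in this change create a DBPath, hand it to DB::open through the AsRef<Path> impl above, and rely on Drop to call DB::destroy once the test finishes. A minimal sketch of that pattern, with an illustrative test name not taken from this diff:

    extern crate rocksdb;
    mod util;

    use rocksdb::{Options, DB};
    use util::DBPath;

    #[test]
    fn uses_a_throwaway_db() {
        let path = DBPath::new("throwaway_example");
        {
            let mut opts = Options::default();
            opts.create_if_missing(true);
            let db = DB::open(&opts, &path).unwrap();
            db.put(b"k", b"v").unwrap();
        } // the DB handle closes here
          // dropping `path` afterwards destroys the on-disk database
    }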
