Bump rocksdb to 7.8.3 (#722)

Branch: master
Oleksandr Anyshchenko authored 2 years ago · committed by GitHub
commit b5b9e56d08 · parent f5bf5ca7b5
24 changed files (changed lines, additions + deletions, in parentheses):

 1. .github/workflows/rust.yml (25)
 2. CHANGELOG.md (2)
 3. Cargo.toml (2)
 4. librocksdb-sys/Cargo.toml (4)
 5. librocksdb-sys/build.rs (8)
 6. librocksdb-sys/build_version.cc (8)
 7. librocksdb-sys/rocksdb (2)
 8. librocksdb-sys/rocksdb_lib_sources.txt (11)
 9. src/compaction_filter.rs (4)
10. src/comparator.rs (4)
11. src/db.rs (22)
12. src/db_iterator.rs (4)
13. src/db_options.rs (2)
14. src/db_pinnable_slice.rs (2)
15. src/merge_operator.rs (10)
16. src/properties.rs (2)
17. src/slice_transform.rs (4)
18. src/transactions/transaction.rs (2)
19. src/write_batch.rs (10)
20. tests/test_column_family.rs (25)
21. tests/test_db.rs (26)
22. tests/test_iterator.rs (2)
23. tests/test_optimistic_transaction_db.rs (16)
24. tests/test_transaction_db.rs (16)

.github/workflows/rust.yml

@@ -79,25 +79,10 @@ jobs:
       - name: Install dependencies
         if: runner.os == 'Windows'
         run: choco install llvm -y
-      - name: Run librocksdb-sys tests
-        uses: actions-rs/cargo@v1
-        with:
-          command: test
-          args: --manifest-path=librocksdb-sys/Cargo.toml
-      - name: Run rocksdb tests (single-threaded cf)
-        uses: actions-rs/cargo@v1
-        with:
-          command: test
-      - name: Run rocksdb tests (multi-threaded cf)
-        uses: actions-rs/cargo@v1
-        env:
-          RUSTFLAGS: -Awarnings # Suppress "variable does not need to be mutable" warnings
-        with:
-          command: test
-          args: --features multi-threaded-cf
+      - name: Run rocksdb tests
+        run: |
+          cargo test --all
+          cargo test --all --features multi-threaded-cf
       - name: Run rocksdb tests (jemalloc)
         if: runner.os != 'Windows'
-        uses: actions-rs/cargo@v1
-        with:
-          command: test
-          args: --features jemalloc
+        run: cargo test --all --features jemalloc

CHANGELOG.md

@@ -2,6 +2,8 @@
 ## [Unreleased]

+* Bump rocksdb to 7.8.3 (aleksuss)
+
 ## 0.19.0 (2022-08-05)

 * Add support for building with `io_uring` on Linux (parazyd)

Cargo.toml

@@ -35,7 +35,7 @@ serde1 = ["serde"]
 [dependencies]
 libc = "0.2"
-librocksdb-sys = { path = "librocksdb-sys", version = "0.8.0" }
+librocksdb-sys = { path = "librocksdb-sys", version = "0.9.0" }
 serde = { version = "1", features = [ "derive" ], optional = true }

 [dev-dependencies]

librocksdb-sys/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "librocksdb-sys"
-version = "0.8.0+7.4.4"
+version = "0.9.0+7.8.3"
 edition = "2018"
 authors = ["Karl Hobley <karlhobley10@gmail.com>", "Arkadiy Paronyan <arkadiy@ethcore.io>"]
 license = "MIT/Apache-2.0/BSD-3-Clause"

@@ -37,6 +37,6 @@ uuid = { version = "1.0", features = ["v4"] }
 [build-dependencies]
 cc = { version = "1.0", features = ["parallel"] }
-bindgen = { version = "0.60", default-features = false, features = ["runtime"] }
+bindgen = { version = "0.63", default-features = false, features = ["runtime"] }
 glob = "0.3"
 pkg-config = { version = "0.3", optional = true }

librocksdb-sys/build.rs

@@ -248,7 +248,7 @@ fn build_rocksdb() {
     }
     for file in lib_sources {
-        config.file(&format!("rocksdb/{file}"));
+        config.file(format!("rocksdb/{file}"));
     }
     config.file("build_version.cc");

@@ -289,7 +289,7 @@ fn build_snappy() {
 fn try_to_find_and_link_lib(lib_name: &str) -> bool {
     println!("cargo:rerun-if-env-changed={}_COMPILE", lib_name);
-    if let Ok(v) = env::var(&format!("{}_COMPILE", lib_name)) {
+    if let Ok(v) = env::var(format!("{}_COMPILE", lib_name)) {
         if v.to_lowercase() == "true" || v == "1" {
             return false;
         }

@@ -298,9 +298,9 @@ fn try_to_find_and_link_lib(lib_name: &str) -> bool {
     println!("cargo:rerun-if-env-changed={}_LIB_DIR", lib_name);
     println!("cargo:rerun-if-env-changed={}_STATIC", lib_name);
-    if let Ok(lib_dir) = env::var(&format!("{}_LIB_DIR", lib_name)) {
+    if let Ok(lib_dir) = env::var(format!("{}_LIB_DIR", lib_name)) {
         println!("cargo:rustc-link-search=native={}", lib_dir);
-        let mode = match env::var_os(&format!("{}_STATIC", lib_name)) {
+        let mode = match env::var_os(format!("{}_STATIC", lib_name)) {
             Some(_) => "static",
             None => "dylib",
         };
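
The build-script hunks above only drop redundant borrows: `std::env::var` and `env::var_os` are generic over `K: AsRef<OsStr>`, and an owned `String` already satisfies that bound, so wrapping the `format!` result in `&` buys nothing. A minimal sketch of the equivalence (the `lookup` helper and the `ROCKSDB` name are illustrative, not from the crate):

```rust
use std::env;

// Both calls compile and behave identically; the borrowed form is simply
// a redundant reference that recent clippy lints tend to flag.
fn lookup(lib_name: &str) -> Option<String> {
    let owned = env::var(format!("{}_LIB_DIR", lib_name)).ok();
    let borrowed = env::var(&format!("{}_LIB_DIR", lib_name)).ok();
    debug_assert_eq!(owned, borrowed);
    owned
}

fn main() {
    println!("{:?}", lookup("ROCKSDB"));
}
```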

librocksdb-sys/build_version.cc

@@ -8,17 +8,17 @@
 // The build script may replace these values with real values based
 // on whether or not GIT is available and the platform settings
-static const std::string rocksdb_build_git_sha = "e656fa3d196c5b4c8a77255db1e6cd36a7ded348";
-static const std::string rocksdb_build_git_tag = "rocksdb_build_git_tag:v7.4.4";
+static const std::string rocksdb_build_git_sha = "bf2c335184de16a3cc1787fa97ef9f22f7114238";
+static const std::string rocksdb_build_git_tag = "rocksdb_build_git_tag:v7.8.3";
 #define HAS_GIT_CHANGES 0
 #if HAS_GIT_CHANGES == 0
 // If HAS_GIT_CHANGES is 0, the GIT date is used.
 // Use the time the branch/tag was last modified
-static const std::string rocksdb_build_date = "rocksdb_build_date:2022-07-19 08:49:59";
+static const std::string rocksdb_build_date = "rocksdb_build_date:2022-11-29 06:51:03";
 #else
 // If HAS_GIT_CHANGES is > 0, the branch/tag has modifications.
 // Use the time the build was created.
-static const std::string rocksdb_build_date = "rocksdb_build_date:2022-07-19 08:49:59";
+static const std::string rocksdb_build_date = "rocksdb_build_date:2022-11-29 06:51:03";
 #endif
 #ifndef ROCKSDB_LITE

librocksdb-sys/rocksdb (submodule)

@@ -1 +1 @@
-Subproject commit e656fa3d196c5b4c8a77255db1e6cd36a7ded348
+Subproject commit bf2c335184de16a3cc1787fa97ef9f22f7114238

librocksdb-sys/rocksdb_lib_sources.txt

@@ -2,12 +2,14 @@ cache/cache.cc
 cache/cache_entry_roles.cc
 cache/cache_key.cc
 cache/cache_reservation_manager.cc
+cache/charged_cache.cc
 cache/clock_cache.cc
 cache/fast_lru_cache.cc
 cache/lru_cache.cc
 cache/compressed_secondary_cache.cc
 cache/sharded_cache.cc
 db/arena_wrapped_db_iter.cc
+db/blob/blob_contents.cc
 db/blob/blob_fetcher.cc
 db/blob/blob_file_addition.cc
 db/blob/blob_file_builder.cc
@@ -31,7 +33,11 @@ db/compaction/compaction_picker.cc
 db/compaction/compaction_picker_fifo.cc
 db/compaction/compaction_picker_level.cc
 db/compaction/compaction_picker_universal.cc
+db/compaction/compaction_service_job.cc
+db/compaction/compaction_state.cc
+db/compaction/compaction_outputs.cc
 db/compaction/sst_partitioner.cc
+db/compaction/subcompaction_state.cc
 db/convenience.cc
 db/db_filesnapshot.cc
 db/db_impl/compacted_db_impl.cc

@@ -66,10 +72,11 @@ db/memtable_list.cc
 db/merge_helper.cc
 db/merge_operator.cc
 db/output_validator.cc
-db/periodic_work_scheduler.cc
+db/periodic_task_scheduler.cc
 db/range_del_aggregator.cc
 db/range_tombstone_fragmenter.cc
 db/repair.cc
+db/seqno_to_time_mapping.cc
 db/snapshot_impl.cc
 db/table_cache.cc
 db/table_properties_collector.cc

@@ -82,6 +89,7 @@ db/version_set.cc
 db/wal_edit.cc
 db/wal_manager.cc
 db/wide/wide_column_serialization.cc
+db/wide/wide_columns.cc
 db/write_batch.cc
 db/write_batch_base.cc
 db/write_controller.cc

@@ -224,6 +232,7 @@ util/ribbon_config.cc
 util/slice.cc
 util/file_checksum_helper.cc
 util/status.cc
+util/stderr_logger.cc
 util/string_util.cc
 util/thread_local.cc
 util/threadpool_imp.cc

src/compaction_filter.rs

@@ -128,8 +128,8 @@ where
     use self::Decision::{Change, Keep, Remove};
     let cb = &mut *(raw_cb as *mut F);
-    let key = slice::from_raw_parts(raw_key as *const u8, key_length as usize);
-    let oldval = slice::from_raw_parts(existing_value as *const u8, value_length as usize);
+    let key = slice::from_raw_parts(raw_key as *const u8, key_length);
+    let oldval = slice::from_raw_parts(existing_value as *const u8, value_length);
     let result = cb.filter(level as u32, key, oldval);
     match result {
         Keep => 0,
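
This hunk and many of the later ones drop `as usize` casts on FFI lengths. In the `libc` crate, `size_t` is a type alias for `usize`, so the casts were no-ops and the raw value can feed `slice::from_raw_parts` directly. A minimal sketch, assuming only the `libc` dependency the crate already has:

```rust
use libc::size_t;
use std::slice;

fn main() {
    let data = [1u8, 2, 3, 4];
    // No cast needed: libc::size_t is an alias for usize.
    let len: size_t = data.len();
    // SAFETY: pointer and length come from a live, initialized slice.
    let view = unsafe { slice::from_raw_parts(data.as_ptr(), len) };
    assert_eq!(view, data.as_slice());
}
```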

src/comparator.rs

@@ -43,8 +43,8 @@ pub unsafe extern "C" fn compare_callback(
     b_len: size_t,
 ) -> c_int {
     let cb: &mut ComparatorCallback = &mut *(raw_cb as *mut ComparatorCallback);
-    let a: &[u8] = slice::from_raw_parts(a_raw as *const u8, a_len as usize);
-    let b: &[u8] = slice::from_raw_parts(b_raw as *const u8, b_len as usize);
+    let a: &[u8] = slice::from_raw_parts(a_raw as *const u8, a_len);
+    let b: &[u8] = slice::from_raw_parts(b_raw as *const u8, b_len);
     match (cb.f)(a, b) {
         Ordering::Less => -1,
         Ordering::Equal => 0,

src/db.rs

@@ -1874,11 +1874,7 @@ impl<T: ThreadMode, D: DBInner> DBCommon<T, D> {
         opts: &IngestExternalFileOptions,
         paths: Vec<P>,
     ) -> Result<(), Error> {
-        let paths_v: Vec<CString> = paths
-            .iter()
-            .map(|path| to_cpath(&path))
-            .collect::<Result<Vec<_>, _>>()?;
+        let paths_v: Vec<CString> = paths.iter().map(to_cpath).collect::<Result<Vec<_>, _>>()?;
         let cpaths: Vec<_> = paths_v.iter().map(|path| path.as_ptr()).collect();
         self.ingest_external_file_raw(opts, &paths_v, &cpaths)

@@ -1902,11 +1898,7 @@ impl<T: ThreadMode, D: DBInner> DBCommon<T, D> {
         opts: &IngestExternalFileOptions,
         paths: Vec<P>,
     ) -> Result<(), Error> {
-        let paths_v: Vec<CString> = paths
-            .iter()
-            .map(|path| to_cpath(&path))
-            .collect::<Result<Vec<_>, _>>()?;
+        let paths_v: Vec<CString> = paths.iter().map(to_cpath).collect::<Result<Vec<_>, _>>()?;
         let cpaths: Vec<_> = paths_v.iter().map(|path| path.as_ptr()).collect();
         self.ingest_external_file_raw_cf(cf, opts, &paths_v, &cpaths)

@@ -1966,7 +1958,7 @@ impl<T: ThreadMode, D: DBInner> DBCommon<T, D> {
             from_cstr(ffi::rocksdb_livefiles_column_family_name(files, i));
         let name = from_cstr(ffi::rocksdb_livefiles_name(files, i));
         let size = ffi::rocksdb_livefiles_size(files, i);
-        let level = ffi::rocksdb_livefiles_level(files, i) as i32;
+        let level = ffi::rocksdb_livefiles_level(files, i);
         // get smallest key inside file
         let smallest_key = ffi::rocksdb_livefiles_smallestkey(files, i, &mut key_size);

@@ -2083,7 +2075,7 @@ impl<I: DBInner> DBCommon<SingleThreaded, I> {
         if let Some(cf) = self.cfs.cfs.remove(name) {
             self.drop_column_family(cf.inner, cf)
         } else {
-            Err(Error::new(format!("Invalid column family: {}", name)))
+            Err(Error::new(format!("Invalid column family: {name}")))
         }
     }

@@ -2110,7 +2102,7 @@ impl<I: DBInner> DBCommon<MultiThreaded, I> {
         if let Some(cf) = self.cfs.cfs.write().unwrap().remove(name) {
             self.drop_column_family(cf.inner, cf)
         } else {
-            Err(Error::new(format!("Invalid column family: {}", name)))
+            Err(Error::new(format!("Invalid column family: {name}")))
         }
     }

@@ -2164,11 +2156,11 @@ fn convert_options(opts: &[(&str, &str)]) -> Result<Vec<(CString, CString)>, Err
         .map(|(name, value)| {
             let cname = match CString::new(name.as_bytes()) {
                 Ok(cname) => cname,
-                Err(e) => return Err(Error::new(format!("Invalid option name `{}`", e))),
+                Err(e) => return Err(Error::new(format!("Invalid option name `{e}`"))),
             };
             let cvalue = match CString::new(value.as_bytes()) {
                 Ok(cvalue) => cvalue,
-                Err(e) => return Err(Error::new(format!("Invalid option value: `{}`", e))),
+                Err(e) => return Err(Error::new(format!("Invalid option value: `{e}`"))),
             };
             Ok((cname, cvalue))
         })
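
The `format!` rewrites in `db.rs` (and in `properties.rs` below) switch to inline format arguments, stabilized in Rust 1.58: the identifier inside the braces is captured from the surrounding scope, and the output is byte-for-byte the same as the positional form. A tiny self-contained check:

```rust
fn main() {
    let name = "cf1";
    let positional = format!("Invalid column family: {}", name);
    let captured = format!("Invalid column family: {name}");
    assert_eq!(positional, captured);
}
```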

src/db_iterator.rs

@@ -336,7 +336,7 @@ impl<'a, D: DBAccess> DBRawIteratorWithThreadMode<'a, D> {
             let mut key_len: size_t = 0;
             let key_len_ptr: *mut size_t = &mut key_len;
             let key_ptr = ffi::rocksdb_iter_key(self.inner.as_ptr(), key_len_ptr);
-            slice::from_raw_parts(key_ptr as *const c_uchar, key_len as usize)
+            slice::from_raw_parts(key_ptr as *const c_uchar, key_len)
         }
     }

@@ -348,7 +348,7 @@ impl<'a, D: DBAccess> DBRawIteratorWithThreadMode<'a, D> {
             let mut val_len: size_t = 0;
             let val_len_ptr: *mut size_t = &mut val_len;
             let val_ptr = ffi::rocksdb_iter_value(self.inner.as_ptr(), val_len_ptr);
-            slice::from_raw_parts(val_ptr as *const c_uchar, val_len as usize)
+            slice::from_raw_parts(val_ptr as *const c_uchar, val_len)
         }
     }
 }

src/db_options.rs

@@ -107,7 +107,7 @@ impl Drop for EnvWrapper {
 impl Env {
     /// Returns default env
-    pub fn default() -> Result<Self, Error> {
+    pub fn new() -> Result<Self, Error> {
         let env = unsafe { ffi::rocksdb_create_default_env() };
         if env.is_null() {
             Err(Error::new("Could not create mem env".to_owned()))
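
The fallible constructor on `Env` is renamed from `default()` to `new()`, so downstream code that builds a custom environment changes one call (the test updates later in this commit do exactly that). A migration sketch, assuming the `rocksdb` crate from this repository as a dependency:

```rust
use rocksdb::{Env, Options};

fn configure() -> Result<Options, rocksdb::Error> {
    // Before this commit: let mut env = Env::default()?;
    let mut env = Env::new()?;
    env.set_bottom_priority_background_threads(0); // as in the updated tests
    let mut opts = Options::default();
    opts.set_env(&env);
    Ok(opts)
}

fn main() {
    let _opts = configure().expect("creating the default RocksDB env should succeed");
}
```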

src/db_pinnable_slice.rs

@@ -34,7 +34,7 @@ unsafe impl<'a> Sync for DBPinnableSlice<'a> {}
 impl<'a> AsRef<[u8]> for DBPinnableSlice<'a> {
     fn as_ref(&self) -> &[u8] {
         // Implement this via Deref so as not to repeat ourselves
-        &**self
+        self
     }
 }
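
Returning `self` here relies on deref coercion: `DBPinnableSlice` implements `Deref<Target = [u8]>`, so a `&Self` in return position coerces to `&[u8]`, which is exactly what `&**self` spelled out by hand. An illustrative sketch with a hypothetical wrapper type (not the crate's own type):

```rust
use std::ops::Deref;

// Hypothetical stand-in for a type that derefs to a byte slice.
struct Pinned(Vec<u8>);

impl Deref for Pinned {
    type Target = [u8];
    fn deref(&self) -> &[u8] {
        &self.0
    }
}

impl AsRef<[u8]> for Pinned {
    fn as_ref(&self) -> &[u8] {
        self // deref coercion turns &Pinned into &[u8]
    }
}

fn main() {
    let p = Pinned(vec![1, 2, 3]);
    assert_eq!(p.as_ref(), [1u8, 2, 3].as_slice());
}
```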

src/merge_operator.rs

@@ -88,7 +88,7 @@ pub unsafe extern "C" fn delete_callback(
     if !value.is_null() {
         drop(Box::from_raw(slice::from_raw_parts_mut(
             value as *mut u8,
-            value_length as usize,
+            value_length,
         )));
     }
 }

@@ -114,13 +114,13 @@ pub unsafe extern "C" fn full_merge_callback<F: MergeFn, PF: MergeFn>(
 ) -> *mut c_char {
     let cb = &mut *(raw_cb as *mut MergeOperatorCallback<F, PF>);
     let operands = &MergeOperands::new(operands_list, operands_list_len, num_operands);
-    let key = slice::from_raw_parts(raw_key as *const u8, key_len as usize);
+    let key = slice::from_raw_parts(raw_key as *const u8, key_len);
     let oldval = if existing_value.is_null() {
         None
     } else {
         Some(slice::from_raw_parts(
             existing_value as *const u8,
-            existing_value_len as usize,
+            existing_value_len,
         ))
     };
     (cb.full_merge_fn)(key, oldval, operands).map_or_else(

@@ -149,7 +149,7 @@ pub unsafe extern "C" fn partial_merge_callback<F: MergeFn, PF: MergeFn>(
 ) -> *mut c_char {
     let cb = &mut *(raw_cb as *mut MergeOperatorCallback<F, PF>);
     let operands = &MergeOperands::new(operands_list, operands_list_len, num_operands);
-    let key = slice::from_raw_parts(raw_key as *const u8, key_len as usize);
+    let key = slice::from_raw_parts(raw_key as *const u8, key_len);
     (cb.partial_merge_fn)(key, None, operands).map_or_else(
         || {
             *new_value_length = 0;

@@ -209,7 +209,7 @@ impl MergeOperands {
         let spacing = mem::size_of::<*const *const u8>();
         let spacing_len = mem::size_of::<*const size_t>();
         let len_ptr = (base_len + (spacing_len * index)) as *const size_t;
-        let len = *len_ptr as usize;
+        let len = *len_ptr;
         let ptr = base + (spacing * index);
         Some(slice::from_raw_parts(
             *(ptr as *const *const u8) as *const u8,

src/properties.rs

@@ -235,7 +235,7 @@ pub const OPTIONS_STATISTICS: &CStr = property!("options-statistics");
 ///
 /// Expects `name` not to contain any interior NUL bytes.
 unsafe fn level_property(name: &str, level: usize) -> CString {
-    let bytes = format!("rocksdb.{}{}\0", name, level).into_bytes();
+    let bytes = format!("rocksdb.{name}{level}\0").into_bytes();
     // SAFETY: We’re appending terminating NUL and all our call sites pass
     // a string without interior NUL bytes.
     CString::from_vec_with_nul_unchecked(bytes)

src/slice_transform.rs

@@ -98,7 +98,7 @@ pub unsafe extern "C" fn transform_callback(
     dst_length: *mut size_t,
 ) -> *mut c_char {
     let cb = &mut *(raw_cb as *mut TransformCallback);
-    let key = slice::from_raw_parts(raw_key as *const u8, key_len as usize);
+    let key = slice::from_raw_parts(raw_key as *const u8, key_len);
     let prefix = (cb.transform_fn)(key);
     *dst_length = prefix.len() as size_t;
     prefix.as_ptr() as *mut c_char

@@ -110,6 +110,6 @@ pub unsafe extern "C" fn in_domain_callback(
     key_len: size_t,
 ) -> c_uchar {
     let cb = &mut *(raw_cb as *mut TransformCallback);
-    let key = slice::from_raw_parts(raw_key as *const u8, key_len as usize);
+    let key = slice::from_raw_parts(raw_key as *const u8, key_len);
     c_uchar::from(cb.in_domain_fn.map_or(true, |in_domain| in_domain(key)))
 }

src/transactions/transaction.rs

@@ -165,7 +165,7 @@ impl<'db, DB> Transaction<'db, DB> {
             None
         } else {
             let mut vec = vec![0; name_len];
-            std::ptr::copy_nonoverlapping(name as *mut u8, vec.as_mut_ptr(), name_len as usize);
+            std::ptr::copy_nonoverlapping(name as *mut u8, vec.as_mut_ptr(), name_len);
             ffi::rocksdb_free(name as *mut c_void);
             Some(vec)
         }

src/write_batch.rs

@@ -72,8 +72,8 @@ unsafe extern "C" fn writebatch_put_callback(
     // freeing the resource before we are done with it
     let boxed_cb = Box::from_raw(state as *mut &mut dyn WriteBatchIterator);
     let leaked_cb = Box::leak(boxed_cb);
-    let key = slice::from_raw_parts(k as *const u8, klen as usize);
-    let value = slice::from_raw_parts(v as *const u8, vlen as usize);
+    let key = slice::from_raw_parts(k as *const u8, klen);
+    let value = slice::from_raw_parts(v as *const u8, vlen);
     leaked_cb.put(
         key.to_vec().into_boxed_slice(),
         value.to_vec().into_boxed_slice(),

@@ -85,7 +85,7 @@ unsafe extern "C" fn writebatch_delete_callback(state: *mut c_void, k: *const c_
     // freeing the resource before we are done with it
     let boxed_cb = Box::from_raw(state as *mut &mut dyn WriteBatchIterator);
     let leaked_cb = Box::leak(boxed_cb);
-    let key = slice::from_raw_parts(k as *const u8, klen as usize);
+    let key = slice::from_raw_parts(k as *const u8, klen);
     leaked_cb.delete(key.to_vec().into_boxed_slice());
 }

@@ -113,7 +113,7 @@ impl<const TRANSACTION: bool> WriteBatchWithTransaction<TRANSACTION> {
         unsafe {
             let mut batch_size: size_t = 0;
             ffi::rocksdb_writebatch_data(self.inner, &mut batch_size);
-            batch_size as usize
+            batch_size
         }
     }

@@ -122,7 +122,7 @@ impl<const TRANSACTION: bool> WriteBatchWithTransaction<TRANSACTION> {
         unsafe {
             let mut batch_size: size_t = 0;
             let batch_data = ffi::rocksdb_writebatch_data(self.inner, &mut batch_size);
-            std::slice::from_raw_parts(batch_data as _, batch_size as usize)
+            std::slice::from_raw_parts(batch_data as _, batch_size)
         }
     }

tests/test_column_family.rs

@@ -78,7 +78,7 @@ fn test_column_family() {
     {
         let mut opts = Options::default();
         opts.set_merge_operator_associative("test operator", test_provided_merge);
-        match DB::open_cf(&opts, &n, &["cf1"]) {
+        match DB::open_cf(&opts, &n, ["cf1"]) {
            Ok(_db) => println!("successfully opened db with column family"),
            Err(e) => panic!("failed to open db with column family: {}", e),
        }

@@ -101,9 +101,9 @@ fn test_column_family() {
     // should b able to drop a cf
     {
         #[cfg(feature = "multi-threaded-cf")]
-        let db = DB::open_cf(&Options::default(), &n, &["cf1"]).unwrap();
+        let db = DB::open_cf(&Options::default(), &n, ["cf1"]).unwrap();
         #[cfg(not(feature = "multi-threaded-cf"))]
-        let mut db = DB::open_cf(&Options::default(), &n, &["cf1"]).unwrap();
+        let mut db = DB::open_cf(&Options::default(), &n, ["cf1"]).unwrap();
         match db.drop_cf("cf1") {
             Ok(_) => println!("cf1 successfully dropped."),

@@ -133,7 +133,7 @@ fn test_can_open_db_with_results_of_list_cf() {
     {
         let options = Options::default();
         let cfs = DB::list_cf(&options, &n).unwrap();
-        let db = DB::open_cf(&options, &n, &cfs).unwrap();
+        let db = DB::open_cf(&options, &n, cfs).unwrap();
         assert!(db.cf_handle("cf1").is_some());
     }

@@ -149,7 +149,7 @@ fn test_create_missing_column_family() {
         opts.create_if_missing(true);
         opts.create_missing_column_families(true);
-        match DB::open_cf(&opts, &n, &["cf1"]) {
+        match DB::open_cf(&opts, &n, ["cf1"]) {
             Ok(_db) => println!("successfully created new column family"),
             Err(e) => panic!("failed to create new column family: {}", e),
         }

@@ -189,7 +189,7 @@ fn test_merge_operator() {
     {
         let mut opts = Options::default();
         opts.set_merge_operator_associative("test operator", test_provided_merge);
-        let db = match DB::open_cf(&opts, &n, &["cf1"]) {
+        let db = match DB::open_cf(&opts, &n, ["cf1"]) {
             Ok(db) => {
                 println!("successfully opened db with column family");
                 db

@@ -297,9 +297,9 @@ fn test_create_duplicate_column_family() {
         opts.create_missing_column_families(true);
         #[cfg(feature = "multi-threaded-cf")]
-        let db = DB::open_cf(&opts, &n, &["cf1"]).unwrap();
+        let db = DB::open_cf(&opts, &n, ["cf1"]).unwrap();
         #[cfg(not(feature = "multi-threaded-cf"))]
-        let mut db = DB::open_cf(&opts, &n, &["cf1"]).unwrap();
+        let mut db = DB::open_cf(&opts, &n, ["cf1"]).unwrap();
         assert!(db.create_cf("cf1", &opts).is_err());
     }
@@ -321,11 +321,12 @@ fn test_no_leaked_column_family() {
     let db = DB::open(&opts, &n).unwrap();
     #[cfg(not(feature = "multi-threaded-cf"))]
     let mut db = DB::open(&opts, &n).unwrap();
-    let large_blob = [0x20; 1024 * 1024];
     #[cfg(feature = "multi-threaded-cf")]
     let mut outlived_cf = None;
+    let large_blob = vec![0x20; 1024 * 1024];
+
     // repeat creating and dropping cfs many time to indirectly detect
     // possible leak via large dir.
     for cf_index in 0..20 {

@@ -341,7 +342,6 @@ fn test_no_leaked_column_family() {
         // force create an SST file
         db.flush_cf(&cf).unwrap();
         db.drop_cf(&cf_name).unwrap();
-
         #[cfg(feature = "multi-threaded-cf")]

@@ -362,7 +362,10 @@ fn test_no_leaked_column_family() {
     #[cfg(feature = "multi-threaded-cf")]
     {
         let outlived_cf = outlived_cf.unwrap();
-        assert_eq!(db.get_cf(&outlived_cf, "k0").unwrap().unwrap(), &large_blob);
+        assert_eq!(
+            &db.get_cf(&outlived_cf, "k0").unwrap().unwrap(),
+            &large_blob
+        );
         drop(outlived_cf);
     }
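
The test churn from `&["cf1"]` to `["cf1"]` (and, further down, from `&[b"k0", ...]` to `[b"k0", ...]`) works because the `open_cf`-style constructors and `multi_get` are generic over `IntoIterator` with `AsRef` items, so fixed-size arrays can be passed by value; the old borrowed-slice form still compiles. A usage sketch assuming the `rocksdb` crate and a throwaway path (the directory name below is illustrative):

```rust
use rocksdb::{Options, DB};

fn main() -> Result<(), rocksdb::Error> {
    let path = "_rust_rocksdb_array_args_example"; // illustrative scratch directory
    let mut opts = Options::default();
    opts.create_if_missing(true);
    opts.create_missing_column_families(true);

    // Previously spelled DB::open_cf(&opts, path, &["cf0", "cf1"]).
    let db = DB::open_cf(&opts, path, ["cf0", "cf1"])?;
    db.put(b"k1", b"v1")?;

    // Previously db.multi_get(&[b"k0", b"k1"]).
    let values = db.multi_get([b"k0", b"k1"]);
    assert_eq!(values.len(), 2);

    drop(db);
    let _ = DB::destroy(&Options::default(), path);
    Ok(())
}
```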

tests/test_db.rs

@@ -572,7 +572,7 @@ fn test_open_cf_with_ttl() {
     let mut opts = Options::default();
     opts.create_if_missing(true);
     opts.create_missing_column_families(true);
-    let db = DB::open_cf_with_ttl(&opts, &path, &["test_cf"], Duration::from_secs(1)).unwrap();
+    let db = DB::open_cf_with_ttl(&opts, &path, ["test_cf"], Duration::from_secs(1)).unwrap();
     let cf = db.cf_handle("test_cf").unwrap();
     db.put_cf(&cf, b"key1", b"value1").unwrap();

@@ -740,7 +740,7 @@ fn env_and_dbpaths_test() {
     opts.create_missing_column_families(true);
     {
-        let mut env = Env::default().unwrap();
+        let mut env = Env::new().unwrap();
         env.lower_high_priority_thread_pool_cpu_priority();
         opts.set_env(&env);
     }

@@ -909,7 +909,7 @@ fn get_with_cache_and_bulkload_test() {
         opts.set_stats_persist_period_sec(0);
         // test Env::Default()->SetBackgroundThreads(0, Env::Priority::BOTTOM);
-        let mut env = Env::default().unwrap();
+        let mut env = Env::new().unwrap();
         env.set_bottom_priority_background_threads(0);
         opts.set_env(&env);

@@ -1044,7 +1044,7 @@ fn get_with_cache_and_bulkload_and_blobs_test() {
         opts.set_stats_persist_period_sec(0);
         // test Env::Default()->SetBackgroundThreads(0, Env::Priority::BOTTOM);
-        let mut env = Env::default().unwrap();
+        let mut env = Env::new().unwrap();
         env.set_bottom_priority_background_threads(0);
         opts.set_env(&env);

@@ -1161,7 +1161,7 @@ fn multi_get() {
     let k1_snap = db.snapshot();
     db.put(b"k2", b"v2").unwrap();
-    let _ = db.multi_get(&[b"k0"; 40]);
+    let _ = db.multi_get([b"k0"; 40]);
     let assert_values = |values: Vec<_>| {
         assert_eq!(3, values.len());

@@ -1171,14 +1171,14 @@ fn multi_get() {
     };
     let values = db
-        .multi_get(&[b"k0", b"k1", b"k2"])
+        .multi_get([b"k0", b"k1", b"k2"])
         .into_iter()
         .map(Result::unwrap)
         .collect::<Vec<_>>();
     assert_values(values);

-    let values = DBAccess::multi_get_opt(&db, &[b"k0", b"k1", b"k2"], &Default::default())
+    let values = DBAccess::multi_get_opt(&db, [b"k0", b"k1", b"k2"], &Default::default())
         .into_iter()
         .map(Result::unwrap)
         .collect::<Vec<_>>();

@@ -1187,7 +1187,7 @@ fn multi_get() {
     let values = db
         .snapshot()
-        .multi_get(&[b"k0", b"k1", b"k2"])
+        .multi_get([b"k0", b"k1", b"k2"])
         .into_iter()
         .map(Result::unwrap)
         .collect::<Vec<_>>();

@@ -1195,7 +1195,7 @@ fn multi_get() {
     assert_values(values);
     let none_values = initial_snap
-        .multi_get(&[b"k0", b"k1", b"k2"])
+        .multi_get([b"k0", b"k1", b"k2"])
         .into_iter()
         .map(Result::unwrap)
         .collect::<Vec<_>>();

@@ -1203,7 +1203,7 @@ fn multi_get() {
     assert_eq!(none_values, vec![None; 3]);
     let k1_only = k1_snap
-        .multi_get(&[b"k0", b"k1", b"k2"])
+        .multi_get([b"k0", b"k1", b"k2"])
         .into_iter()
         .map(Result::unwrap)
         .collect::<Vec<_>>();

@@ -1220,7 +1220,7 @@ fn multi_get_cf() {
     let mut opts = Options::default();
     opts.create_if_missing(true);
     opts.create_missing_column_families(true);
-    let db = DB::open_cf(&opts, &path, &["cf0", "cf1", "cf2"]).unwrap();
+    let db = DB::open_cf(&opts, &path, ["cf0", "cf1", "cf2"]).unwrap();
     let cf0 = db.cf_handle("cf0").unwrap();

@@ -1250,7 +1250,7 @@ fn batched_multi_get_cf() {
     let mut opts = Options::default();
     opts.create_if_missing(true);
     opts.create_missing_column_families(true);
-    let db = DB::open_cf(&opts, &path, &["cf0"]).unwrap();
+    let db = DB::open_cf(&opts, &path, ["cf0"]).unwrap();
     let cf = db.cf_handle("cf0").unwrap();
     db.put_cf(&cf, b"k1", b"v1").unwrap();

@@ -1288,7 +1288,7 @@ fn key_may_exist_cf() {
     let mut opts = Options::default();
    opts.create_if_missing(true);
     opts.create_missing_column_families(true);
-    let db = DB::open_cf(&opts, &path, &["cf"]).unwrap();
+    let db = DB::open_cf(&opts, &path, ["cf"]).unwrap();
     let cf = db.cf_handle("cf").unwrap();
     assert!(!db.key_may_exist_cf(&cf, "nonexistent"));

tests/test_iterator.rs

@@ -174,7 +174,7 @@ fn test_prefix_iterator_uses_full_prefix() {
     }
     assert_iter(
-        db.prefix_iterator(&[0, 1, 1]),
+        db.prefix_iterator([0, 1, 1]),
         &[
             pair(&[0, 1, 1, 1], b"444"),
             pair(&[0, 1, 2, 1], b"555"),

tests/test_optimistic_transaction_db.rs

@@ -81,7 +81,7 @@ fn multi_get() {
     let k1_snap = db.snapshot();
     db.put(b"k2", b"v2").unwrap();
-    let _ = db.multi_get(&[b"k0"; 40]);
+    let _ = db.multi_get([b"k0"; 40]);
     let assert_values = |values: Vec<_>| {
         assert_eq!(3, values.len());

@@ -91,14 +91,14 @@ fn multi_get() {
     };
     let values = db
-        .multi_get(&[b"k0", b"k1", b"k2"])
+        .multi_get([b"k0", b"k1", b"k2"])
         .into_iter()
         .map(Result::unwrap)
         .collect::<Vec<_>>();
     assert_values(values);

-    let values = DBAccess::multi_get_opt(&db, &[b"k0", b"k1", b"k2"], &Default::default())
+    let values = DBAccess::multi_get_opt(&db, [b"k0", b"k1", b"k2"], &Default::default())
         .into_iter()
         .map(Result::unwrap)
         .collect::<Vec<_>>();

@@ -107,7 +107,7 @@ fn multi_get() {
     let values = db
         .snapshot()
-        .multi_get(&[b"k0", b"k1", b"k2"])
+        .multi_get([b"k0", b"k1", b"k2"])
         .into_iter()
         .map(Result::unwrap)
         .collect::<Vec<_>>();

@@ -115,7 +115,7 @@ fn multi_get() {
     assert_values(values);
     let none_values = initial_snap
-        .multi_get(&[b"k0", b"k1", b"k2"])
+        .multi_get([b"k0", b"k1", b"k2"])
         .into_iter()
         .map(Result::unwrap)
         .collect::<Vec<_>>();

@@ -123,7 +123,7 @@ fn multi_get() {
     assert_eq!(none_values, vec![None; 3]);
     let k1_only = k1_snap
-        .multi_get(&[b"k0", b"k1", b"k2"])
+        .multi_get([b"k0", b"k1", b"k2"])
         .into_iter()
         .map(Result::unwrap)
         .collect::<Vec<_>>();

@@ -132,7 +132,7 @@ fn multi_get() {
     let txn = db.transaction();
     let values = txn
-        .multi_get(&[b"k0", b"k1", b"k2"])
+        .multi_get([b"k0", b"k1", b"k2"])
         .into_iter()
         .map(Result::unwrap)
         .collect::<Vec<_>>();

@@ -150,7 +150,7 @@ fn multi_get_cf() {
     opts.create_if_missing(true);
     opts.create_missing_column_families(true);
     let db: OptimisticTransactionDB =
-        OptimisticTransactionDB::open_cf(&opts, &path, &["cf0", "cf1", "cf2"]).unwrap();
+        OptimisticTransactionDB::open_cf(&opts, &path, ["cf0", "cf1", "cf2"]).unwrap();
     let cf0 = db.cf_handle("cf0").unwrap();

tests/test_transaction_db.rs

@@ -103,7 +103,7 @@ fn multi_get() {
     let k1_snap = db.snapshot();
     db.put(b"k2", b"v2").unwrap();
-    let _ = db.multi_get(&[b"k0"; 40]);
+    let _ = db.multi_get([b"k0"; 40]);
     let assert_values = |values: Vec<_>| {
         assert_eq!(3, values.len());

@@ -113,14 +113,14 @@ fn multi_get() {
     };
     let values = db
-        .multi_get(&[b"k0", b"k1", b"k2"])
+        .multi_get([b"k0", b"k1", b"k2"])
         .into_iter()
         .map(Result::unwrap)
         .collect::<Vec<_>>();
     assert_values(values);

-    let values = DBAccess::multi_get_opt(&db, &[b"k0", b"k1", b"k2"], &Default::default())
+    let values = DBAccess::multi_get_opt(&db, [b"k0", b"k1", b"k2"], &Default::default())
         .into_iter()
         .map(Result::unwrap)
         .collect::<Vec<_>>();

@@ -129,7 +129,7 @@ fn multi_get() {
     let values = db
         .snapshot()
-        .multi_get(&[b"k0", b"k1", b"k2"])
+        .multi_get([b"k0", b"k1", b"k2"])
         .into_iter()
         .map(Result::unwrap)
         .collect::<Vec<_>>();

@@ -137,7 +137,7 @@ fn multi_get() {
     assert_values(values);
     let none_values = initial_snap
-        .multi_get(&[b"k0", b"k1", b"k2"])
+        .multi_get([b"k0", b"k1", b"k2"])
         .into_iter()
         .map(Result::unwrap)
         .collect::<Vec<_>>();

@@ -145,7 +145,7 @@ fn multi_get() {
     assert_eq!(none_values, vec![None; 3]);
     let k1_only = k1_snap
-        .multi_get(&[b"k0", b"k1", b"k2"])
+        .multi_get([b"k0", b"k1", b"k2"])
         .into_iter()
         .map(Result::unwrap)
         .collect::<Vec<_>>();

@@ -154,7 +154,7 @@ fn multi_get() {
     let txn = db.transaction();
     let values = txn
-        .multi_get(&[b"k0", b"k1", b"k2"])
+        .multi_get([b"k0", b"k1", b"k2"])
         .into_iter()
         .map(Result::unwrap)
         .collect::<Vec<_>>();

@@ -175,7 +175,7 @@ fn multi_get_cf() {
         &opts,
         &TransactionDBOptions::default(),
         &path,
-        &["cf0", "cf1", "cf2"],
+        ["cf0", "cf1", "cf2"],
     )
     .unwrap();
