Follow the default behavior of rustfmt (#224)

without.crypto
Kagami Sascha Rosylight, 10 months ago, committed by GitHub
parent c30523065a
commit 0f3e934f77
45 changed files (changed-line count in parentheses):

  1. .github/workflows/ci.yml (12)
  2. .rustfmt.toml (18)
  3. examples/iterator.rs (23)
  4. examples/simple-store.rs (137)
  5. src/backend.rs (42)
  6. src/backend/impl_lmdb.rs (25)
  7. src/backend/impl_lmdb/arch_migrator.rs (189)
  8. src/backend/impl_lmdb/arch_migrator_error.rs (6)
  9. src/backend/impl_lmdb/environment.rs (92)
  10. src/backend/impl_lmdb/error.rs (15)
  11. src/backend/impl_lmdb/flags.rs (13)
  12. src/backend/impl_lmdb/iter.rs (10)
  13. src/backend/impl_lmdb/transaction.rs (40)
  14. src/backend/impl_safe.rs (21)
  15. src/backend/impl_safe/cursor.rs (18)
  16. src/backend/impl_safe/database.rs (12)
  17. src/backend/impl_safe/environment.rs (67)
  18. src/backend/impl_safe/error.rs (19)
  19. src/backend/impl_safe/flags.rs (18)
  20. src/backend/impl_safe/snapshot.rs (35)
  21. src/backend/impl_safe/transaction.rs (92)
  22. src/backend/traits.rs (37)
  23. src/bin/dump.rs (13)
  24. src/bin/rand.rs (29)
  25. src/env.rs (63)
  26. src/error.rs (13)
  27. src/helpers.rs (14)
  28. src/lib.rs (30)
  29. src/manager.rs (76)
  30. src/migrator.rs (30)
  31. src/readwrite.rs (27)
  32. src/store.rs (4)
  33. src/store/integer.rs (189)
  34. src/store/integermulti.rs (356)
  35. src/store/multi.rs (33)
  36. src/store/single.rs (25)
  37. src/value.rs (39)
  38. tests/env-all.rs (275)
  39. tests/env-lmdb.rs (696)
  40. tests/env-migration.rs (280)
  41. tests/env-safe.rs (592)
  42. tests/integer-store.rs (27)
  43. tests/manager.rs (250)
  44. tests/multi-integer-store.rs (59)
  45. tests/test_txn.rs (37)

@ -4,12 +4,6 @@ on: [push, pull_request]
env:
RUST_BACKTRACE: 1
# We install a known-to-have-rustfmt version of the nightly toolchain
# in order to run the nightly version of rustfmt, which supports rules
# that we depend upon. When updating, pick a suitable nightly version
# from https://rust-lang.github.io/rustup-components-history/
# See .rustfmt.toml for the list of unstable features.
KNOWN_TO_HAVE_RUSTFMT: nightly-2022-08-12
defaults:
run:
@ -37,11 +31,7 @@ jobs:
rustup default ${{ matrix.toolchain }}${{ matrix.target.host }}
- if: ${{ matrix.toolchain == 'nightly' }}
run: |
rustup toolchain install $KNOWN_TO_HAVE_RUSTFMT --profile minimal --component rustfmt clippy
cargo +$KNOWN_TO_HAVE_RUSTFMT fmt --all -- --check
- if: ${{ matrix.toolchain == 'nightly' }}
run: cargo +$KNOWN_TO_HAVE_RUSTFMT clippy --all-features -- -D warnings
run: cargo clippy --all-features -- -D warnings
env:
CC: clang

@ -1,18 +0,0 @@
match_block_trailing_comma = true
max_width = 120
use_small_heuristics = "Off"
### unstable, nightly-only
# https://github.com/rust-lang/rustfmt/issues/3349
comment_width = 90
# https://github.com/rust-lang/rustfmt/issues/3374
force_multiline_blocks = true
# https://github.com/rust-lang/rustfmt/issues/4991
imports_granularity="Crate"
# https://github.com/rust-lang/rustfmt/issues/3361
imports_layout = "Vertical"
# https://github.com/rust-lang/rustfmt/issues/3363
reorder_impl_items = true
# https://github.com/rust-lang/rustfmt/issues/3347
wrap_comments = true
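
With .rustfmt.toml deleted, the nightly-only overrides above no longer apply, and stable rustfmt's defaults produce the layout seen in the rest of this diff. The snippet below is a minimal sketch using hypothetical example code (not taken from rkv) of the two most visible differences: imports are no longer forced onto one item per line, and a block-bodied match arm now ends in `}` rather than `},`.

// Hypothetical illustration of the old overrides versus the default style.
use std::{fs, str}; // default keeps this on one line; imports_layout = "Vertical" split it

fn classify(path: &str) -> &'static str {
    match fs::metadata(path) {
        Ok(meta) if meta.is_dir() => "directory",
        Ok(_) => "file",
        Err(_) => {
            // Without match_block_trailing_comma = true, this arm closes with
            // `}` instead of `},`; the same one-character change shows up in
            // many of the hunks below.
            str::from_utf8(b"missing").unwrap()
        }
    }
}

The drop from max_width = 120 to the default 100 columns likewise explains why so many `.put(...)` and `println!(...)` calls in the hunks below now wrap onto multiple lines.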

@ -7,25 +7,13 @@
//!
//! cargo run --example iterator
use std::{
fs,
str,
};
use std::{fs, str};
use tempfile::Builder;
use rkv::{
backend::{
SafeMode,
SafeModeDatabase,
SafeModeEnvironment,
},
Manager,
Rkv,
SingleStore,
StoreError,
StoreOptions,
Value,
backend::{SafeMode, SafeModeDatabase, SafeModeEnvironment},
Manager, Rkv, SingleStore, StoreError, StoreOptions, Value,
};
fn main() {
@ -67,7 +55,10 @@ fn main() {
}
}
fn populate_store(k: &Rkv<SafeModeEnvironment>, store: SingleStore<SafeModeDatabase>) -> Result<(), StoreError> {
fn populate_store(
k: &Rkv<SafeModeEnvironment>,
store: SingleStore<SafeModeDatabase>,
) -> Result<(), StoreError> {
let mut writer = k.write()?;
for (country, city) in vec![
("Canada", Value::Str("Ottawa")),

@ -12,16 +12,8 @@ use std::fs;
use tempfile::Builder;
use rkv::{
backend::{
SafeMode,
SafeModeDatabase,
SafeModeEnvironment,
SafeModeRwTransaction,
},
Manager,
Rkv,
StoreOptions,
Value,
backend::{SafeMode, SafeModeDatabase, SafeModeEnvironment, SafeModeRwTransaction},
Manager, Rkv, StoreOptions, Value,
};
type MultiStore = rkv::MultiStore<SafeModeDatabase>;
@ -51,7 +43,9 @@ fn delete(store: MultiStore, writer: &mut Writer) {
let vals = vec!["string uno", "string quatro", "string siete"];
// we convert the writer into a cursor so that we can safely read
for i in 0..keys.len() {
store.delete(writer, &keys[i], &Value::Str(vals[i])).unwrap();
store
.delete(writer, &keys[i], &Value::Str(vals[i]))
.unwrap();
}
}
@ -74,13 +68,31 @@ fn main() {
// Use a writer to mutate the store
let mut writer = k.write().unwrap();
store.put(&mut writer, "int", &Value::I64(1234)).unwrap();
store.put(&mut writer, "uint", &Value::U64(1234_u64)).unwrap();
store.put(&mut writer, "float", &Value::F64(1234.0.into())).unwrap();
store.put(&mut writer, "instant", &Value::Instant(1_528_318_073_700)).unwrap();
store.put(&mut writer, "boolean", &Value::Bool(true)).unwrap();
store.put(&mut writer, "string", &Value::Str("hรฉllo, yรถu")).unwrap();
store.put(&mut writer, "json", &Value::Json(r#"{"foo":"bar", "number": 1}"#)).unwrap();
store.put(&mut writer, "blob", &Value::Blob(b"blob")).unwrap();
store
.put(&mut writer, "uint", &Value::U64(1234_u64))
.unwrap();
store
.put(&mut writer, "float", &Value::F64(1234.0.into()))
.unwrap();
store
.put(&mut writer, "instant", &Value::Instant(1_528_318_073_700))
.unwrap();
store
.put(&mut writer, "boolean", &Value::Bool(true))
.unwrap();
store
.put(&mut writer, "string", &Value::Str("hรฉllo, yรถu"))
.unwrap();
store
.put(
&mut writer,
"json",
&Value::Json(r#"{"foo":"bar", "number": 1}"#),
)
.unwrap();
store
.put(&mut writer, "blob", &Value::Blob(b"blob"))
.unwrap();
writer.commit().unwrap();
}
@ -88,15 +100,33 @@ fn main() {
{
let mut ids = Vec::new();
let mut writer = k.write().unwrap();
multistore.put(&mut writer, "str1", &Value::Str("string uno")).unwrap();
multistore.put(&mut writer, "str1", &Value::Str("string dos")).unwrap();
multistore.put(&mut writer, "str1", &Value::Str("string tres")).unwrap();
multistore.put(&mut writer, "str2", &Value::Str("string quatro")).unwrap();
multistore.put(&mut writer, "str2", &Value::Str("string cinco")).unwrap();
multistore.put(&mut writer, "str2", &Value::Str("string seis")).unwrap();
multistore.put(&mut writer, "str3", &Value::Str("string siete")).unwrap();
multistore.put(&mut writer, "str3", &Value::Str("string ocho")).unwrap();
multistore.put(&mut writer, "str3", &Value::Str("string nueve")).unwrap();
multistore
.put(&mut writer, "str1", &Value::Str("string uno"))
.unwrap();
multistore
.put(&mut writer, "str1", &Value::Str("string dos"))
.unwrap();
multistore
.put(&mut writer, "str1", &Value::Str("string tres"))
.unwrap();
multistore
.put(&mut writer, "str2", &Value::Str("string quatro"))
.unwrap();
multistore
.put(&mut writer, "str2", &Value::Str("string cinco"))
.unwrap();
multistore
.put(&mut writer, "str2", &Value::Str("string seis"))
.unwrap();
multistore
.put(&mut writer, "str3", &Value::Str("string siete"))
.unwrap();
multistore
.put(&mut writer, "str3", &Value::Str("string ocho"))
.unwrap();
multistore
.put(&mut writer, "str3", &Value::Str("string nueve"))
.unwrap();
getput(multistore, &mut writer, &mut ids);
writer.commit().unwrap();
let mut writer = k.write().unwrap();
@ -116,7 +146,10 @@ fn main() {
println!("Get string {:?}", store.get(&reader, "string").unwrap());
println!("Get json {:?}", store.get(&reader, "json").unwrap());
println!("Get blob {:?}", store.get(&reader, "blob").unwrap());
println!("Get non-existent {:?}", store.get(&reader, "non-existent").unwrap());
println!(
"Get non-existent {:?}",
store.get(&reader, "non-existent").unwrap()
);
}
println!("Looking up keys via Writer.get()...");
@ -125,11 +158,17 @@ fn main() {
store.put(&mut writer, "foo", &Value::Str("bar")).unwrap();
store.put(&mut writer, "bar", &Value::Str("baz")).unwrap();
store.delete(&mut writer, "foo").unwrap();
println!("It should be None! ({:?})", store.get(&writer, "foo").unwrap());
println!(
"It should be None! ({:?})",
store.get(&writer, "foo").unwrap()
);
println!("Get bar ({:?})", store.get(&writer, "bar").unwrap());
writer.commit().unwrap();
let reader = k.read().expect("reader");
println!("It should be None! ({:?})", store.get(&reader, "foo").unwrap());
println!(
"It should be None! ({:?})",
store.get(&reader, "foo").unwrap()
);
println!("Get bar {:?}", store.get(&reader, "bar").unwrap());
}
@ -141,7 +180,10 @@ fn main() {
writer.abort();
let reader = k.read().expect("reader");
println!("It should be None! ({:?})", store.get(&reader, "foo").unwrap());
println!(
"It should be None! ({:?})",
store.get(&reader, "foo").unwrap()
);
// Explicitly aborting a transaction is not required unless an early
// abort is desired, since both read and write transactions will
// implicitly be aborted once they go out of scope.
@ -153,7 +195,10 @@ fn main() {
let mut writer = k.write().unwrap();
store.put(&mut writer, "foo", &Value::Str("bar")).unwrap();
store.delete(&mut writer, "foo").unwrap();
println!("It should be None! ({:?})", store.get(&writer, "foo").unwrap());
println!(
"It should be None! ({:?})",
store.get(&writer, "foo").unwrap()
);
writer.commit().unwrap();
// Committing a transaction consumes the writer, preventing you
@ -172,20 +217,36 @@ fn main() {
writer.commit().unwrap();
let reader = k.read().expect("reader");
println!("It should be None! ({:?})", store.get(&reader, "foo").unwrap());
println!("It should be None! ({:?})", store.get(&reader, "bar").unwrap());
println!(
"It should be None! ({:?})",
store.get(&reader, "foo").unwrap()
);
println!(
"It should be None! ({:?})",
store.get(&reader, "bar").unwrap()
);
}
println!("Write and read on multiple stores...");
{
let another_store = k.open_single("another_store", StoreOptions::create()).unwrap();
let another_store = k
.open_single("another_store", StoreOptions::create())
.unwrap();
let mut writer = k.write().unwrap();
store.put(&mut writer, "foo", &Value::Str("bar")).unwrap();
another_store.put(&mut writer, "foo", &Value::Str("baz")).unwrap();
another_store
.put(&mut writer, "foo", &Value::Str("baz"))
.unwrap();
writer.commit().unwrap();
let reader = k.read().unwrap();
println!("Get from store value: {:?}", store.get(&reader, "foo").unwrap());
println!("Get from another store value: {:?}", another_store.get(&reader, "foo").unwrap());
println!(
"Get from store value: {:?}",
store.get(&reader, "foo").unwrap()
);
println!(
"Get from another store value: {:?}",
another_store.get(&reader, "foo").unwrap()
);
}
}

@ -19,38 +19,22 @@ pub use traits::*;
#[cfg(feature = "lmdb")]
pub use impl_lmdb::{
ArchMigrateError as LmdbArchMigrateError,
ArchMigrateResult as LmdbArchMigrateResult,
ArchMigrator as LmdbArchMigrator,
DatabaseFlagsImpl as LmdbDatabaseFlags,
DatabaseImpl as LmdbDatabase,
EnvironmentBuilderImpl as Lmdb,
EnvironmentFlagsImpl as LmdbEnvironmentFlags,
EnvironmentImpl as LmdbEnvironment,
ErrorImpl as LmdbError,
InfoImpl as LmdbInfo,
IterImpl as LmdbIter,
RoCursorImpl as LmdbRoCursor,
RoTransactionImpl as LmdbRoTransaction,
RwCursorImpl as LmdbRwCursor,
RwTransactionImpl as LmdbRwTransaction,
StatImpl as LmdbStat,
ArchMigrateError as LmdbArchMigrateError, ArchMigrateResult as LmdbArchMigrateResult,
ArchMigrator as LmdbArchMigrator, DatabaseFlagsImpl as LmdbDatabaseFlags,
DatabaseImpl as LmdbDatabase, EnvironmentBuilderImpl as Lmdb,
EnvironmentFlagsImpl as LmdbEnvironmentFlags, EnvironmentImpl as LmdbEnvironment,
ErrorImpl as LmdbError, InfoImpl as LmdbInfo, IterImpl as LmdbIter,
RoCursorImpl as LmdbRoCursor, RoTransactionImpl as LmdbRoTransaction,
RwCursorImpl as LmdbRwCursor, RwTransactionImpl as LmdbRwTransaction, StatImpl as LmdbStat,
WriteFlagsImpl as LmdbWriteFlags,
};
pub use impl_safe::{
DatabaseFlagsImpl as SafeModeDatabaseFlags,
DatabaseImpl as SafeModeDatabase,
EnvironmentBuilderImpl as SafeMode,
EnvironmentFlagsImpl as SafeModeEnvironmentFlags,
EnvironmentImpl as SafeModeEnvironment,
ErrorImpl as SafeModeError,
InfoImpl as SafeModeInfo,
IterImpl as SafeModeIter,
RoCursorImpl as SafeModeRoCursor,
RoTransactionImpl as SafeModeRoTransaction,
RwCursorImpl as SafeModeRwCursor,
RwTransactionImpl as SafeModeRwTransaction,
StatImpl as SafeModeStat,
DatabaseFlagsImpl as SafeModeDatabaseFlags, DatabaseImpl as SafeModeDatabase,
EnvironmentBuilderImpl as SafeMode, EnvironmentFlagsImpl as SafeModeEnvironmentFlags,
EnvironmentImpl as SafeModeEnvironment, ErrorImpl as SafeModeError, InfoImpl as SafeModeInfo,
IterImpl as SafeModeIter, RoCursorImpl as SafeModeRoCursor,
RoTransactionImpl as SafeModeRoTransaction, RwCursorImpl as SafeModeRwCursor,
RwTransactionImpl as SafeModeRwTransaction, StatImpl as SafeModeStat,
WriteFlagsImpl as SafeModeWriteFlags,
};

@ -21,29 +21,14 @@ mod stat;
mod transaction;
pub use arch_migrator::{
MigrateError as ArchMigrateError,
MigrateResult as ArchMigrateResult,
Migrator as ArchMigrator,
};
pub use cursor::{
RoCursorImpl,
RwCursorImpl,
MigrateError as ArchMigrateError, MigrateResult as ArchMigrateResult, Migrator as ArchMigrator,
};
pub use cursor::{RoCursorImpl, RwCursorImpl};
pub use database::DatabaseImpl;
pub use environment::{
EnvironmentBuilderImpl,
EnvironmentImpl,
};
pub use environment::{EnvironmentBuilderImpl, EnvironmentImpl};
pub use error::ErrorImpl;
pub use flags::{
DatabaseFlagsImpl,
EnvironmentFlagsImpl,
WriteFlagsImpl,
};
pub use flags::{DatabaseFlagsImpl, EnvironmentFlagsImpl, WriteFlagsImpl};
pub use info::InfoImpl;
pub use iter::IterImpl;
pub use stat::StatImpl;
pub use transaction::{
RoTransactionImpl,
RwTransactionImpl,
};
pub use transaction::{RoTransactionImpl, RwTransactionImpl};

@ -58,38 +58,18 @@
//! variants identify specific kinds of migration failures.
use std::{
collections::{
BTreeMap,
HashMap,
},
collections::{BTreeMap, HashMap},
convert::TryFrom,
fs::File,
io::{
Cursor,
Read,
Seek,
SeekFrom,
Write,
},
path::{
Path,
PathBuf,
},
io::{Cursor, Read, Seek, SeekFrom, Write},
path::{Path, PathBuf},
rc::Rc,
str,
};
use bitflags::bitflags;
use byteorder::{
LittleEndian,
ReadBytesExt,
};
use lmdb::{
DatabaseFlags,
Environment,
Transaction,
WriteFlags,
};
use byteorder::{LittleEndian, ReadBytesExt};
use lmdb::{DatabaseFlags, Environment, Transaction, WriteFlags};
pub use super::arch_migrator_error::MigrateError;
@ -278,9 +258,7 @@ impl Page {
match Self::parse_page_header(&mut cursor, bits)? {
PageHeader::Regular {
mp_flags,
pb_lower,
..
mp_flags, pb_lower, ..
} => {
if mp_flags.contains(PageFlags::LEAF2) || mp_flags.contains(PageFlags::SUBP) {
// We don't yet support DUPFIXED and DUPSORT databases.
@ -299,22 +277,21 @@ impl Page {
} else {
Err(MigrateError::UnexpectedPageHeaderVariant)
}
},
PageHeader::Overflow {
..
} => {
}
PageHeader::Overflow { .. } => {
// There isn't anything to do, nor should we try to instantiate
// a page of this type, as we only access them when reading
// a value that is too large to fit into a leaf node.
Err(MigrateError::UnexpectedPageHeaderVariant)
},
}
}
}
fn parse_page_header(cursor: &mut Cursor<&[u8]>, bits: Bits) -> MigrateResult<PageHeader> {
let mp_pgno = cursor.read_uint::<LittleEndian>(bits.size())?;
let _mp_pad = cursor.read_u16::<LittleEndian>()?;
let mp_flags = PageFlags::from_bits(cursor.read_u16::<LittleEndian>()?).ok_or(MigrateError::InvalidPageBits)?;
let mp_flags = PageFlags::from_bits(cursor.read_u16::<LittleEndian>()?)
.ok_or(MigrateError::InvalidPageBits)?;
if mp_flags.contains(PageFlags::OVERFLOW) {
let pb_pages = cursor.read_u32::<LittleEndian>()?;
@ -352,7 +329,11 @@ impl Page {
})
}
fn parse_leaf_nodes(cursor: &mut Cursor<&[u8]>, pb_lower: u16, bits: Bits) -> MigrateResult<Vec<LeafNode>> {
fn parse_leaf_nodes(
cursor: &mut Cursor<&[u8]>,
pb_lower: u16,
bits: Bits,
) -> MigrateResult<Vec<LeafNode>> {
cursor.set_position(page_header_size(bits));
let num_keys = Self::num_keys(pb_lower, bits);
let mp_ptrs = Self::parse_mp_ptrs(cursor, num_keys)?;
@ -373,7 +354,8 @@ impl Page {
let mn_lo = cursor.read_u16::<LittleEndian>()?;
let mn_hi = cursor.read_u16::<LittleEndian>()?;
let mn_flags = NodeFlags::from_bits(cursor.read_u16::<LittleEndian>()?).ok_or(MigrateError::InvalidNodeBits)?;
let mn_flags = NodeFlags::from_bits(cursor.read_u16::<LittleEndian>()?)
.ok_or(MigrateError::InvalidNodeBits)?;
let mn_ksize = cursor.read_u16::<LittleEndian>()?;
let start = usize::try_from(cursor.position())?;
@ -430,7 +412,11 @@ impl Page {
u32::from(mn_lo) + ((u32::from(mn_hi)) << 16)
}
fn parse_branch_nodes(cursor: &mut Cursor<&[u8]>, pb_lower: u16, bits: Bits) -> MigrateResult<Vec<BranchNode>> {
fn parse_branch_nodes(
cursor: &mut Cursor<&[u8]>,
pb_lower: u16,
bits: Bits,
) -> MigrateResult<Vec<BranchNode>> {
let num_keys = Self::num_keys(pb_lower, bits);
let mp_ptrs = Self::parse_mp_ptrs(cursor, num_keys)?;
@ -523,10 +509,7 @@ impl Migrator {
}
};
Ok(Migrator {
file,
bits,
})
Ok(Migrator { file, bits })
}
/// Dump the data in one of the databases in the LMDB environment. If the `database`
@ -549,8 +532,9 @@ impl Migrator {
let pairs;
if let Some(database) = database {
let subdbs = self.get_subdbs(root_page)?;
let database =
subdbs.get(database.as_bytes()).ok_or_else(|| MigrateError::DatabaseNotFound(database.to_string()))?;
let database = subdbs
.get(database.as_bytes())
.ok_or_else(|| MigrateError::DatabaseNotFound(database.to_string()))?;
let root_page_num = database.md_root;
let root_page = Rc::new(self.get_page(root_page_num)?);
pairs = self.get_pairs(root_page)?;
@ -658,22 +642,17 @@ impl Migrator {
for branch in nodes {
pages.push(Rc::new(self.get_page(branch.mp_pgno)?));
}
},
}
Page::LEAF(nodes) => {
for leaf in nodes {
if let LeafNode::SubData {
key,
db,
..
} = leaf
{
if let LeafNode::SubData { key, db, .. } = leaf {
subdbs.insert(key.to_vec(), db.clone());
};
}
},
}
_ => {
return Err(MigrateError::UnexpectedPageVariant);
},
}
}
}
@ -690,17 +669,13 @@ impl Migrator {
for branch in nodes {
pages.push(Rc::new(self.get_page(branch.mp_pgno)?));
}
},
}
Page::LEAF(nodes) => {
for leaf in nodes {
match leaf {
LeafNode::Regular {
key,
value,
..
} => {
LeafNode::Regular { key, value, .. } => {
pairs.insert(key.to_vec(), value.to_vec());
},
}
LeafNode::BigData {
mv_size,
key,
@ -711,14 +686,13 @@ impl Migrator {
// migration by waiting to read big data until it's time
// to write it to the new database.
let value = self.read_data(
*overflow_pgno * u64::from(PAGESIZE) + page_header_size(self.bits),
*overflow_pgno * u64::from(PAGESIZE)
+ page_header_size(self.bits),
*mv_size as usize,
)?;
pairs.insert(key.to_vec(), value);
},
LeafNode::SubData {
..
} => {
}
LeafNode::SubData { .. } => {
// We don't include subdatabase leaves in pairs, since
// there's no architecture-neutral representation of them,
// and in any case they're meta-data that should get
@ -728,13 +702,13 @@ impl Migrator {
// produced by `mdb_dump`, however, we could allow
// consumers to specify that they'd like to include these
// records.
},
}
};
}
},
}
_ => {
return Err(MigrateError::UnexpectedPageVariant);
},
}
}
}
@ -749,7 +723,10 @@ impl Migrator {
}
fn get_page(&mut self, page_no: u64) -> MigrateResult<Page> {
Page::new(self.read_data(page_no * u64::from(PAGESIZE), usize::from(PAGESIZE))?, self.bits)
Page::new(
self.read_data(page_no * u64::from(PAGESIZE), usize::from(PAGESIZE))?,
self.bits,
)
}
fn get_meta_data(&mut self) -> MigrateResult<MetaData> {
@ -769,7 +746,7 @@ impl Migrator {
return Err(MigrateError::InvalidDataVersion);
}
Ok(meta)
},
}
_ => Err(MigrateError::UnexpectedPageVariant),
}
}
@ -779,20 +756,10 @@ impl Migrator {
mod tests {
use super::*;
use std::{
env,
fs,
mem::size_of,
};
use std::{env, fs, mem::size_of};
use lmdb::{
Environment,
Error as LmdbError,
};
use tempfile::{
tempdir,
tempfile,
};
use lmdb::{Environment, Error as LmdbError};
use tempfile::{tempdir, tempfile};
fn compare_files(ref_file: &mut File, new_file: &mut File) -> MigrateResult<()> {
ref_file.seek(SeekFrom::Start(0))?;
@ -804,16 +771,14 @@ mod tests {
loop {
match ref_file.read(ref_buf) {
Err(err) => panic!("{}", err),
Ok(ref_len) => {
match new_file.read(new_buf) {
Err(err) => panic!("{}", err),
Ok(new_len) => {
assert_eq!(ref_len, new_len);
if ref_len == 0 {
break;
};
assert_eq!(ref_buf[0..ref_len], new_buf[0..new_len]);
},
Ok(ref_len) => match new_file.read(new_buf) {
Err(err) => panic!("{}", err),
Ok(new_len) => {
assert_eq!(ref_len, new_len);
if ref_len == 0 {
break;
};
assert_eq!(ref_buf[0..ref_len], new_buf[0..new_len]);
}
},
}
@ -855,7 +820,9 @@ mod tests {
migrator.dump(Some("subdb"), &new_dump_file)?;
// Open the reference dump file.
let ref_dump_file_path: PathBuf = [cwd, "tests", "envs", "ref_dump_subdb.txt"].iter().collect();
let ref_dump_file_path: PathBuf = [cwd, "tests", "envs", "ref_dump_subdb.txt"]
.iter()
.collect();
let mut ref_dump_file = File::open(ref_dump_file_path)?;
// Compare the new dump file to the reference dump file.
@ -897,7 +864,9 @@ mod tests {
migrator.dump(Some("subdb"), &new_dump_file)?;
// Open the reference dump file.
let ref_dump_file_path: PathBuf = [cwd, "tests", "envs", "ref_dump_subdb.txt"].iter().collect();
let ref_dump_file_path: PathBuf = [cwd, "tests", "envs", "ref_dump_subdb.txt"]
.iter()
.collect();
let mut ref_dump_file = File::open(ref_dump_file_path)?;
// Compare the new dump file to the reference dump file.
@ -923,7 +892,9 @@ mod tests {
migrator.dump(Some("subdb"), &new_dump_file)?;
// Open the reference dump file.
let ref_dump_file_path: PathBuf = [cwd, "tests", "envs", "ref_dump_subdb.txt"].iter().collect();
let ref_dump_file_path: PathBuf = [cwd, "tests", "envs", "ref_dump_subdb.txt"]
.iter()
.collect();
let mut ref_dump_file = File::open(ref_dump_file_path)?;
// Compare the new dump file to the reference dump file.
@ -949,7 +920,9 @@ mod tests {
migrator.dump(Some("subdb"), &new_dump_file)?;
// Open the reference dump file.
let ref_dump_file_path: PathBuf = [cwd, "tests", "envs", "ref_dump_subdb.txt"].iter().collect();
let ref_dump_file_path: PathBuf = [cwd, "tests", "envs", "ref_dump_subdb.txt"]
.iter()
.collect();
let mut ref_dump_file = File::open(ref_dump_file_path)?;
// Compare the new dump file to the reference dump file.
@ -971,8 +944,14 @@ mod tests {
let test_env_path: PathBuf = [cwd, "tests", "envs", test_env_name].iter().collect();
let old_env = tempdir()?;
fs::copy(test_env_path.join("data.mdb"), old_env.path().join("data.mdb"))?;
fs::copy(test_env_path.join("lock.mdb"), old_env.path().join("lock.mdb"))?;
fs::copy(
test_env_path.join("data.mdb"),
old_env.path().join("data.mdb"),
)?;
fs::copy(
test_env_path.join("lock.mdb"),
old_env.path().join("lock.mdb"),
)?;
// Confirm that it isn't possible to open the old environment with LMDB.
assert_eq!(
@ -994,7 +973,9 @@ mod tests {
migrator.dump(Some("subdb"), &new_dump_file)?;
// Open the reference dump file.
let ref_dump_file_path: PathBuf = [cwd, "tests", "envs", "ref_dump_subdb.txt"].iter().collect();
let ref_dump_file_path: PathBuf = [cwd, "tests", "envs", "ref_dump_subdb.txt"]
.iter()
.collect();
let mut ref_dump_file = File::open(ref_dump_file_path)?;
// Compare the new dump file to the reference dump file.
@ -1002,8 +983,14 @@ mod tests {
// Overwrite the old env's files with the new env's files and confirm that it's now
// possible to open the old env with LMDB.
fs::copy(new_env.path().join("data.mdb"), old_env.path().join("data.mdb"))?;
fs::copy(new_env.path().join("lock.mdb"), old_env.path().join("lock.mdb"))?;
fs::copy(
new_env.path().join("data.mdb"),
old_env.path().join("data.mdb"),
)?;
fs::copy(
new_env.path().join("lock.mdb"),
old_env.path().join("lock.mdb"),
)?;
assert!(Environment::new().open(old_env.path()).is_ok());
Ok(())

@ -8,11 +8,7 @@
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
use std::{
io,
num,
str,
};
use std::{io, num, str};
use thiserror::Error;

@ -10,32 +10,18 @@
use std::{
fs,
path::{
Path,
PathBuf,
},
path::{Path, PathBuf},
};
use lmdb::Error as LmdbError;
use super::{
DatabaseFlagsImpl,
DatabaseImpl,
EnvironmentFlagsImpl,
ErrorImpl,
InfoImpl,
RoTransactionImpl,
RwTransactionImpl,
StatImpl,
DatabaseFlagsImpl, DatabaseImpl, EnvironmentFlagsImpl, ErrorImpl, InfoImpl, RoTransactionImpl,
RwTransactionImpl, StatImpl,
};
use crate::backend::traits::{
BackendEnvironment,
BackendEnvironmentBuilder,
BackendInfo,
BackendIter,
BackendRoCursor,
BackendRoCursorTransaction,
BackendStat,
BackendEnvironment, BackendEnvironmentBuilder, BackendInfo, BackendIter, BackendRoCursor,
BackendRoCursorTransaction, BackendStat,
};
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
@ -112,7 +98,7 @@ impl<'b> BackendEnvironmentBuilder<'b> for EnvironmentBuilderImpl {
if !path.is_file() {
return Err(ErrorImpl::UnsuitableEnvironmentPath(path.into()));
}
},
}
EnvironmentPathType::SubDir => {
if !path.is_dir() {
if !self.make_dir_if_needed {
@ -120,12 +106,21 @@ impl<'b> BackendEnvironmentBuilder<'b> for EnvironmentBuilderImpl {
}
fs::create_dir_all(path)?;
}
},
}
}
self.builder.open(path).map_err(ErrorImpl::LmdbError).and_then(|lmdbenv| {
EnvironmentImpl::new(path, self.env_path_type, self.env_lock_type, self.env_db_type, lmdbenv)
})
self.builder
.open(path)
.map_err(ErrorImpl::LmdbError)
.and_then(|lmdbenv| {
EnvironmentImpl::new(
path,
self.env_path_type,
self.env_lock_type,
self.env_db_type,
lmdbenv,
)
})
}
}
@ -187,33 +182,54 @@ impl<'e> BackendEnvironment<'e> for EnvironmentImpl {
if self.env_db_type == EnvironmentDefaultDbType::SingleDatabase {
return Ok(vec![None]);
}
let db = self.lmdbenv.open_db(None).map(DatabaseImpl).map_err(ErrorImpl::LmdbError)?;
let db = self
.lmdbenv
.open_db(None)
.map(DatabaseImpl)
.map_err(ErrorImpl::LmdbError)?;
let reader = self.begin_ro_txn()?;
let cursor = reader.open_ro_cursor(&db)?;
let mut iter = cursor.into_iter();
let mut store = vec![];
while let Some(result) = iter.next() {
let (key, _) = result?;
let name = String::from_utf8(key.to_owned()).map_err(|_| ErrorImpl::LmdbError(lmdb::Error::Corrupted))?;
let name = String::from_utf8(key.to_owned())
.map_err(|_| ErrorImpl::LmdbError(lmdb::Error::Corrupted))?;
store.push(Some(name));
}
Ok(store)
}
fn open_db(&self, name: Option<&str>) -> Result<Self::Database, Self::Error> {
self.lmdbenv.open_db(name).map(DatabaseImpl).map_err(ErrorImpl::LmdbError)
self.lmdbenv
.open_db(name)
.map(DatabaseImpl)
.map_err(ErrorImpl::LmdbError)
}
fn create_db(&self, name: Option<&str>, flags: Self::Flags) -> Result<Self::Database, Self::Error> {
self.lmdbenv.create_db(name, flags.0).map(DatabaseImpl).map_err(ErrorImpl::LmdbError)
fn create_db(
&self,
name: Option<&str>,
flags: Self::Flags,
) -> Result<Self::Database, Self::Error> {
self.lmdbenv
.create_db(name, flags.0)
.map(DatabaseImpl)
.map_err(ErrorImpl::LmdbError)
}
fn begin_ro_txn(&'e self) -> Result<Self::RoTransaction, Self::Error> {
self.lmdbenv.begin_ro_txn().map(RoTransactionImpl).map_err(ErrorImpl::LmdbError)
self.lmdbenv
.begin_ro_txn()
.map(RoTransactionImpl)
.map_err(ErrorImpl::LmdbError)
}
fn begin_rw_txn(&'e self) -> Result<Self::RwTransaction, Self::Error> {
self.lmdbenv.begin_rw_txn().map(RwTransactionImpl).map_err(ErrorImpl::LmdbError)
self.lmdbenv
.begin_rw_txn()
.map(RwTransactionImpl)
.map_err(ErrorImpl::LmdbError)
}
fn sync(&self, force: bool) -> Result<(), Self::Error> {
@ -221,11 +237,17 @@ impl<'e> BackendEnvironment<'e> for EnvironmentImpl {
}
fn stat(&self) -> Result<Self::Stat, Self::Error> {
self.lmdbenv.stat().map(StatImpl).map_err(ErrorImpl::LmdbError)
self.lmdbenv
.stat()
.map(StatImpl)
.map_err(ErrorImpl::LmdbError)
}
fn info(&self) -> Result<Self::Info, Self::Error> {
self.lmdbenv.info().map(InfoImpl).map_err(ErrorImpl::LmdbError)
self.lmdbenv
.info()
.map(InfoImpl)
.map_err(ErrorImpl::LmdbError)
}
fn freelist(&self) -> Result<usize, Self::Error> {
@ -247,7 +269,9 @@ impl<'e> BackendEnvironment<'e> for EnvironmentImpl {
}
fn set_map_size(&self, size: usize) -> Result<(), Self::Error> {
self.lmdbenv.set_map_size(size).map_err(ErrorImpl::LmdbError)
self.lmdbenv
.set_map_size(size)
.map_err(ErrorImpl::LmdbError)
}
fn get_files_on_disk(&self) -> Vec<PathBuf> {

@ -8,16 +8,9 @@
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
use std::{
fmt,
io,
path::PathBuf,
};
use std::{fmt, io, path::PathBuf};
use crate::{
backend::traits::BackendError,
error::StoreError,
};
use crate::{backend::traits::BackendError, error::StoreError};
#[derive(Debug)]
pub enum ErrorImpl {
@ -49,7 +42,9 @@ impl Into<StoreError> for ErrorImpl {
ErrorImpl::LmdbError(lmdb::Error::DbsFull) => StoreError::DbsFull,
ErrorImpl::LmdbError(lmdb::Error::ReadersFull) => StoreError::ReadersFull,
ErrorImpl::LmdbError(error) => StoreError::LmdbError(error),
ErrorImpl::UnsuitableEnvironmentPath(path) => StoreError::UnsuitableEnvironmentPath(path),
ErrorImpl::UnsuitableEnvironmentPath(path) => {
StoreError::UnsuitableEnvironmentPath(path)
}
ErrorImpl::IoError(error) => StoreError::IoError(error),
}
}

@ -9,17 +9,8 @@
// specific language governing permissions and limitations under the License.
use crate::backend::{
common::{
DatabaseFlags,
EnvironmentFlags,
WriteFlags,
},
traits::{
BackendDatabaseFlags,
BackendEnvironmentFlags,
BackendFlags,
BackendWriteFlags,
},
common::{DatabaseFlags, EnvironmentFlags, WriteFlags},
traits::{BackendDatabaseFlags, BackendEnvironmentFlags, BackendFlags, BackendWriteFlags},
};
#[derive(Debug, Eq, PartialEq, Copy, Clone, Default)]

@ -22,12 +22,12 @@ pub struct IterImpl<'i, C> {
}
impl<'i, C> IterImpl<'i, C> {
pub(crate) fn new(mut cursor: C, to_iter: impl FnOnce(&mut C) -> lmdb::Iter<'i>) -> IterImpl<'i, C> {
pub(crate) fn new(
mut cursor: C,
to_iter: impl FnOnce(&mut C) -> lmdb::Iter<'i>,
) -> IterImpl<'i, C> {
let iter = to_iter(&mut cursor);
IterImpl {
cursor,
iter,
}
IterImpl { cursor, iter }
}
}

@ -10,16 +10,9 @@
use lmdb::Transaction;
use super::{
DatabaseImpl,
ErrorImpl,
RoCursorImpl,
WriteFlagsImpl,
};
use super::{DatabaseImpl, ErrorImpl, RoCursorImpl, WriteFlagsImpl};
use crate::backend::traits::{
BackendRoCursorTransaction,
BackendRoTransaction,
BackendRwCursorTransaction,
BackendRoCursorTransaction, BackendRoTransaction, BackendRwCursorTransaction,
BackendRwTransaction,
};
@ -43,7 +36,10 @@ impl<'t> BackendRoCursorTransaction<'t> for RoTransactionImpl<'t> {
type RoCursor = RoCursorImpl<'t>;
fn open_ro_cursor(&'t self, db: &Self::Database) -> Result<Self::RoCursor, Self::Error> {
self.0.open_ro_cursor(db.0).map(RoCursorImpl).map_err(ErrorImpl::LmdbError)
self.0
.open_ro_cursor(db.0)
.map(RoCursorImpl)
.map_err(ErrorImpl::LmdbError)
}
}
@ -59,8 +55,16 @@ impl<'t> BackendRwTransaction for RwTransactionImpl<'t> {
self.0.get(db.0, &key).map_err(ErrorImpl::LmdbError)
}
fn put(&mut self, db: &Self::Database, key: &[u8], value: &[u8], flags: Self::Flags) -> Result<(), Self::Error> {
self.0.put(db.0, &key, &value, flags.0).map_err(ErrorImpl::LmdbError)
fn put(
&mut self,
db: &Self::Database,
key: &[u8],
value: &[u8],
flags: Self::Flags,
) -> Result<(), Self::Error> {
self.0
.put(db.0, &key, &value, flags.0)
.map_err(ErrorImpl::LmdbError)
}
#[cfg(not(feature = "db-dup-sort"))]
@ -69,7 +73,12 @@ impl<'t> BackendRwTransaction for RwTransactionImpl<'t> {
}
#[cfg(feature = "db-dup-sort")]
fn del(&mut self, db: &Self::Database, key: &[u8], value: Option<&[u8]>) -> Result<(), Self::Error> {
fn del(
&mut self,
db: &Self::Database,
key: &[u8],
value: Option<&[u8]>,
) -> Result<(), Self::Error> {
self.0.del(db.0, &key, value).map_err(ErrorImpl::LmdbError)
}
@ -90,6 +99,9 @@ impl<'t> BackendRwCursorTransaction<'t> for RwTransactionImpl<'t> {
type RoCursor = RoCursorImpl<'t>;
fn open_ro_cursor(&'t self, db: &Self::Database) -> Result<Self::RoCursor, Self::Error> {
self.0.open_ro_cursor(db.0).map(RoCursorImpl).map_err(ErrorImpl::LmdbError)
self.0
.open_ro_cursor(db.0)
.map(RoCursorImpl)
.map_err(ErrorImpl::LmdbError)
}
}

@ -19,25 +19,12 @@ mod snapshot;
mod stat;
mod transaction;
pub use cursor::{
RoCursorImpl,
RwCursorImpl,
};
pub use cursor::{RoCursorImpl, RwCursorImpl};
pub use database::DatabaseImpl;
pub use environment::{
EnvironmentBuilderImpl,
EnvironmentImpl,
};
pub use environment::{EnvironmentBuilderImpl, EnvironmentImpl};
pub use error::ErrorImpl;
pub use flags::{
DatabaseFlagsImpl,
EnvironmentFlagsImpl,
WriteFlagsImpl,
};
pub use flags::{DatabaseFlagsImpl, EnvironmentFlagsImpl, WriteFlagsImpl};
pub use info::InfoImpl;
pub use iter::IterImpl;
pub use stat::StatImpl;
pub use transaction::{
RoTransactionImpl,
RwTransactionImpl,
};
pub use transaction::{RoTransactionImpl, RwTransactionImpl};

@ -8,10 +8,7 @@
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
use super::{
snapshot::Snapshot,
IterImpl,
};
use super::{snapshot::Snapshot, IterImpl};
use crate::backend::traits::BackendRoCursor;
#[derive(Debug)]
@ -29,14 +26,18 @@ impl<'c> BackendRoCursor<'c> for RoCursorImpl<'c> {
where
K: AsRef<[u8]> + 'c,
{