Merge remote-tracking branch 'origin' into tyler_cf

master · Tyler Neely · 10 years ago
commit fe0d4c5aeb
  1. .travis.yml (7 changes)
  2. Cargo.toml (5 changes)
  3. README.md (14 changes)
  4. src/ffi.rs (4 changes)
  5. src/lib.rs (6 changes)
  6. src/main.rs (14 changes)
  7. src/merge_operator.rs (15 changes)
  8. src/rocksdb.rs (33 changes)

@@ -2,6 +2,11 @@ language: rust
rust:
- nightly
- 1.1.0
os:
- linux
- osx
script:
- rustc src/lib.rs
- rustc -Z parse-only src/lib.rs

@@ -2,7 +2,7 @@
name = "rocksdb"
description = "A Rust wrapper for Facebook's RocksDB embeddable database."
version = "0.0.8"
version = "0.1.0"
authors = ["Tyler Neely <t@jujit.su>", "David Greenberg <dsg123456789@gmail.com>"]
license = "Apache-2.0"
exclude = [
@@ -20,3 +20,6 @@ valgrind=[]
name = "test"
path = "test/test.rs"
[dependencies]
libc = "0.1.8"

@@ -2,7 +2,7 @@ rust-rocksdb
============
[![Build Status](https://travis-ci.org/spacejam/rust-rocksdb.svg?branch=master)](https://travis-ci.org/spacejam/rust-rocksdb)
This library has been tested against RocksDB 3.8.1 on linux and OSX. The 0.0.8 crate should work with the Rust nightly release as of 7/18/15.
This library has been tested against RocksDB 3.8.1 on linux and OSX. The 0.1.0 crate should work with the Rust 1.1 stable and nightly releases as of 8/2/15.
### status
- [x] basic open/put/get/delete/close
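
For orientation, a minimal sketch of that basic open/put/get/delete flow against the 0.1.0 crate, assuming the `Options::new()`/`create_if_missing` constructor pattern and the `RocksDB`/`Writable` methods used elsewhere in this commit; error handling is elided and the exact shape of `get`'s return value is not relied on:

```rust
extern crate rocksdb;

use rocksdb::{Options, RocksDB, Writable};

fn main() {
    let mut opts = Options::new();
    opts.create_if_missing(true);
    // Open (or create) a database, write a key, read it back, then delete it.
    let db = RocksDB::open(&opts, "/tmp/rust-rocksdb-demo").unwrap();
    db.put(b"my key", b"my value");
    let _ = db.get(b"my key");
    db.delete(b"my key");
}
```
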
@@ -31,7 +31,7 @@ sudo make install
###### Cargo.toml
```rust
[dependencies]
rocksdb = "~0.0.8"
rocksdb = "~0.1.0"
```
###### Code
```rust
@@ -112,9 +112,15 @@ use rocksdb::{Options, RocksDB, MergeOperands, Writable};
fn concat_merge(new_key: &[u8], existing_val: Option<&[u8]>,
operands: &mut MergeOperands) -> Vec<u8> {
let mut result: Vec<u8> = Vec::with_capacity(operands.size_hint().0);
existing_val.map(|v| { result.extend(v) });
existing_val.map(|v| {
for e in v {
result.push(*e)
}
});
for op in operands {
result.extend(op);
for e in op {
result.push(*e)
}
}
result
}

@@ -369,9 +369,9 @@ extern {
err: *mut *const i8
) -> RocksDBCFHandle;
pub fn rocksdb_drop_column_family(db: RocksDBInstance,
column_family_handle: *const RocksDBCFHandle,
column_family_handle: RocksDBCFHandle,
err: *mut *const i8);
pub fn rocksdb_column_family_handle_destroy(column_family_handle: *mut *const RocksDBCFHandle);
pub fn rocksdb_column_family_handle_destroy(column_family_handle: RocksDBCFHandle);
}
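
A note on the signature change above: the column-family handle is an opaque newtype around a raw C pointer, so it can be passed by value straight across the FFI boundary. A minimal sketch of that pattern with hypothetical stand-in types (not the crate's actual definitions):

```rust
extern crate libc;

use libc::{c_char, c_void};

// Hypothetical stand-ins for the crate's opaque handle newtypes.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct DBInstance(*const c_void);

#[repr(C)]
#[derive(Copy, Clone)]
pub struct CFHandle(*const c_void);

extern "C" {
    // Passing CFHandle by value hands the wrapped pointer directly to C,
    // mirroring the by-value signatures adopted in this hunk.
    pub fn rocksdb_drop_column_family(db: DBInstance,
                                      cf: CFHandle,
                                      err: *mut *const c_char);
}

// Declarations only; nothing is called, so no library needs to be linked.
fn main() {}
```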

@@ -15,12 +15,6 @@
*/
#![crate_id = "rocksdb"]
#![crate_type = "lib"]
#![feature(libc)]
#![feature(unique)]
#![feature(path_ext)]
#![feature(convert)]
#![feature(raw)]
#![allow(dead_code)]
pub use ffi as rocksdb_ffi;
pub use ffi::{new_bloom_filter, RocksDBCompactionStyle, RocksDBComparator};

@@ -13,10 +13,7 @@
See the License for the specific language governing permissions and
limitations under the License.
*/
#![feature(test)]
extern crate rocksdb;
extern crate test;
use rocksdb::{Options, RocksDB, MergeOperands, new_bloom_filter, Writable, };
use rocksdb::RocksDBCompactionStyle::RocksDBUniversalCompaction;
@@ -72,11 +69,15 @@ fn concat_merge(new_key: &[u8], existing_val: Option<&[u8]>,
mut operands: &mut MergeOperands) -> Vec<u8> {
let mut result: Vec<u8> = Vec::with_capacity(operands.size_hint().0);
match existing_val {
Some(v) => result.extend(v),
Some(v) => for e in v {
result.push(*e)
},
None => (),
}
for op in operands {
result.extend(op);
for e in op {
result.push(*e);
}
}
result
}
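
The same explicit byte-copy loop appears in README.md, src/main.rs, and src/merge_operator.rs because extending a `Vec<u8>` directly from a `&[u8]` was not yet supported on stable 1.1. On later compilers the equivalent can be written with `extend_from_slice`; a sketch, not part of this commit, assuming `MergeOperands` yields `&[u8]` items as it does here:

```rust
use rocksdb::MergeOperands;

fn concat_merge(_new_key: &[u8], existing_val: Option<&[u8]>,
                operands: &mut MergeOperands) -> Vec<u8> {
    let mut result: Vec<u8> = Vec::with_capacity(operands.size_hint().0);
    // Copy the existing value first, then append every operand in order.
    if let Some(v) = existing_val {
        result.extend_from_slice(v);
    }
    for op in operands {
        result.extend_from_slice(op);
    }
    result
}
```
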
@@ -138,7 +139,6 @@ fn main() {
#[cfg(test)]
mod tests {
use test::Bencher;
use std::thread::sleep_ms;
use rocksdb::{BlockBasedOptions, Options, RocksDB, MergeOperands, new_bloom_filter, Writable };
@@ -172,6 +172,7 @@ mod tests {
RocksDB::open(&opts, path).unwrap()
}
/* TODO(tyler) unstable
#[bench]
fn a_writes(b: &mut Bencher) {
// dirty hack due to parallel tests causing contention.
@@ -205,4 +206,5 @@ mod tests {
}
RocksDB::destroy(&opts, path).is_ok();
}
*/
}
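
Commenting the benches out keeps the crate building on stable 1.1, since `#[bench]` and `test::Bencher` need the unstable `test` feature. An alternative, sketched below under the assumption of a hypothetical `nightly` Cargo feature (declared as `nightly = []` in Cargo.toml), is to gate the unstable pieces so they still compile on nightly:

```rust
// Crate root (lib.rs): pull in the unstable `test` crate only when the
// hypothetical `nightly` feature is enabled.
#![cfg_attr(feature = "nightly", feature(test))]

#[cfg(feature = "nightly")]
extern crate test;

#[cfg(all(test, feature = "nightly"))]
mod benches {
    use test::Bencher;

    #[bench]
    fn bench_noop(b: &mut Bencher) {
        // Placeholder body; real benches would exercise put/get here.
        b.iter(|| 1 + 1);
    }
}
```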

@@ -131,7 +131,6 @@ impl MergeOperands {
impl<'a> Iterator for &'a mut MergeOperands {
type Item = &'a [u8];
fn next(&mut self) -> Option<&'a [u8]> {
use std::raw::Slice;
match self.cursor == self.num_operands {
true => None,
false => {
@@ -145,8 +144,8 @@ impl<'a> Iterator for &'a mut MergeOperands {
let len = *len_ptr as usize;
let ptr = base + (spacing * self.cursor);
self.cursor += 1;
Some(mem::transmute(Slice{data:*(ptr as *const *const u8)
as *const u8, len: len}))
Some(mem::transmute(slice::from_raw_parts(*(ptr as *const *const u8)
as *const u8, len)))
}
}
}
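
`std::raw::Slice` was an unstable description of a slice's memory layout; `slice::from_raw_parts` builds the same fat pointer from a data pointer and a length on stable Rust. A standalone sketch of the conversion, independent of the crate's types:

```rust
use std::slice;

// Rebuild a byte slice from a raw pointer/length pair, e.g. one handed back by C.
// The caller must guarantee `ptr` is valid for `len` bytes for the lifetime 'a.
unsafe fn bytes_from_raw<'a>(ptr: *const u8, len: usize) -> &'a [u8] {
    slice::from_raw_parts(ptr, len)
}

fn main() {
    let data = [1u8, 2, 3];
    let view = unsafe { bytes_from_raw(data.as_ptr(), data.len()) };
    assert_eq!(view, &[1u8, 2, 3][..]);
}
```
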
@@ -165,11 +164,17 @@ fn test_provided_merge(new_key: &[u8],
let nops = operands.size_hint().0;
let mut result: Vec<u8> = Vec::with_capacity(nops);
match existing_val {
Some(v) => result.extend(v),
Some(v) => {
for e in v {
result.push(*e);
}
},
None => (),
}
for op in operands {
result.extend(op);
for e in op {
result.push(*e);
}
}
result
}

@@ -18,10 +18,9 @@ extern crate libc;
use self::libc::{c_void, size_t};
use std::collections::BTreeMap;
use std::ffi::{CString, CStr};
use std::fs::{self, PathExt};
use std::fs;
use std::ops::Deref;
use std::path::Path;
use std::ptr::Unique;
use std::slice;
use std::str::from_utf8;
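
Dropping `std::fs::PathExt` removes the unstable trait that provided `Path::exists()` on Rust 1.1 (the inherent method was stabilized later), which is why the `ospath.exists()` guards disappear in the hunks below. If an existence check is still wanted on stable, `fs::metadata` can stand in; a minimal sketch, not part of this commit:

```rust
use std::fs;
use std::path::Path;

// Stable-Rust replacement for the unstable PathExt::exists() used previously.
fn path_exists(path: &Path) -> bool {
    fs::metadata(path).is_ok()
}

fn main() {
    println!("exists: {}", path_exists(Path::new("/tmp")));
}
```
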
@@ -197,12 +196,10 @@ impl RocksDB {
let cpath_ptr = cpath.as_ptr();
let ospath = Path::new(path);
if !ospath.exists() {
match fs::create_dir_all(&ospath) {
Err(e) => return Err("Failed to create rocksdb directory.".to_string()),
Ok(_) => (),
}
}
let mut err: *const i8 = 0 as *const i8;
let err_ptr: *mut *const i8 = &mut err;
@@ -242,14 +239,13 @@ impl RocksDB {
}).collect();
// Prepare to ship to C.
let names = cfnames.as_slice();
let copts: *const rocksdb_ffi::RocksDBOptions = cfopts.as_ptr();
let handles: *const rocksdb_ffi::RocksDBCFHandle = cfhandles.as_ptr();
let nfam = cfs_v.len();
unsafe {
db = rocksdb_ffi::rocksdb_open_column_families(opts.inner, cpath_ptr,
nfam as libc::c_int,
names.as_ptr(),
cfnames.as_ptr(),
copts, handles, err_ptr);
}
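
The `cfnames.as_ptr()` argument above passes an array of C strings holding the column-family names. A self-contained sketch of that general pattern, with hypothetical names, showing that the `CString` values must outlive the FFI call that reads the pointer array:

```rust
use std::ffi::CString;
use std::os::raw::c_char;

fn main() {
    let names = ["default", "cf1"];
    // Keep the CStrings alive in their own Vec; the pointer Vec borrows from
    // them and would dangle if `cstrings` were dropped before the FFI call.
    let cstrings: Vec<CString> =
        names.iter().map(|n| CString::new(*n).unwrap()).collect();
    let ptrs: Vec<*const c_char> =
        cstrings.iter().map(|c| c.as_ptr()).collect();
    // `ptrs.as_ptr()` is what a C API expecting `const char* const*` receives.
    println!("prepared {} column family names", ptrs.len());
}
```
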
@@ -279,10 +275,6 @@ impl RocksDB {
let cpath_ptr = cpath.as_ptr();
let ospath = Path::new(path);
if !ospath.exists() {
return Err("path does not exist".to_string());
}
let mut err: *const i8 = 0 as *const i8;
let err_ptr: *mut *const i8 = &mut err;
unsafe {
@@ -299,10 +291,6 @@ impl RocksDB {
let cpath_ptr = cpath.as_ptr();
let ospath = Path::new(path);
if !ospath.exists() {
return Err("path does not exist".to_string());
}
let mut err: *const i8 = 0 as *const i8;
let err_ptr: *mut *const i8 = &mut err;
unsafe {
@@ -384,7 +372,7 @@ impl RocksDB {
let mut err: *const i8 = 0 as *const i8;
let err_ptr: *mut *const i8 = &mut err;
unsafe {
rocksdb_ffi::rocksdb_drop_column_family(self.inner, cf.unwrap(), err_ptr);
rocksdb_ffi::rocksdb_drop_column_family(self.inner, *cf.unwrap(), err_ptr);
}
if !err.is_null() {
return Err(error_message(err));
@@ -472,7 +460,12 @@ impl Drop for WriteBatch {
impl Drop for RocksDB {
fn drop(&mut self) {
unsafe { rocksdb_ffi::rocksdb_close(self.inner); }
unsafe {
for (_, cf) in self.cfs.iter() {
rocksdb_ffi::rocksdb_column_family_handle_destroy(*cf);
}
rocksdb_ffi::rocksdb_close(self.inner);
}
}
}
@@ -535,21 +528,21 @@ impl ReadOptions {
}
pub struct RocksDBVector {
base: Unique<u8>,
base: *mut u8,
len: usize,
}
impl Deref for RocksDBVector {
type Target = [u8];
fn deref(&self) -> &[u8] {
unsafe { slice::from_raw_parts(self.base.get(), self.len) }
unsafe { slice::from_raw_parts(self.base, self.len) }
}
}
impl Drop for RocksDBVector {
fn drop(&mut self) {
unsafe {
libc::free(*self.base.deref() as *mut libc::c_void);
libc::free(self.base as *mut libc::c_void);
}
}
}
@@ -558,7 +551,7 @@ impl RocksDBVector {
pub fn from_c(val: *mut u8, val_len: size_t) -> RocksDBVector {
unsafe {
RocksDBVector {
base: Unique::new(val),
base: val,
len: val_len as usize,
}
}
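
`Unique<u8>` was an unstable pointer wrapper, so `RocksDBVector` now stores a plain `*mut u8` while keeping the same deref-to-slice and free-on-drop behavior. A self-contained sketch of that ownership pattern outside the crate's types, assuming the buffer really was allocated with C's `malloc`:

```rust
extern crate libc;

use std::ops::Deref;
use std::slice;

// Owns a buffer allocated by C (malloc) that must be released with free().
struct CBuf {
    base: *mut u8,
    len: usize,
}

impl Deref for CBuf {
    type Target = [u8];
    fn deref(&self) -> &[u8] {
        unsafe { slice::from_raw_parts(self.base, self.len) }
    }
}

impl Drop for CBuf {
    fn drop(&mut self) {
        unsafe { libc::free(self.base as *mut libc::c_void) }
    }
}

fn main() {
    unsafe {
        let p = libc::malloc(3) as *mut u8;
        *p = 1;
        *p.offset(1) = 2;
        *p.offset(2) = 3;
        let buf = CBuf { base: p, len: 3 };
        assert_eq!(&buf[..], &[1u8, 2, 3][..]);
        // `buf` is dropped at the end of this block, freeing the memory once.
    }
}
```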
