Commit 1953d1fa authored by Jay, committed by GitHub

change width to default 100 (#47)

parent 4c1b1655
 reorder_imports = true
-max_width = 80
-ideal_width = 80
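The hunk above is the project's rustfmt configuration (presumably rustfmt.toml; this view does not show file names). Dropping the explicit max_width = 80 entry (and the related ideal_width setting) lets rustfmt fall back to its default max_width of 100, which is what the commit title refers to; the remaining hunks are consistent with re-running rustfmt under the wider limit. A minimal sketch of the configuration after this change, assuming the file is rustfmt.toml and that the rustfmt release in use defaults to max_width = 100:

    # rustfmt.toml after this commit (sketch, not copied verbatim from the repository)
    # max_width = 80 and ideal_width = 80 were removed, so rustfmt now formats
    # to its default 100-column limit.
    reorder_imports = true

reorder_imports = true is kept, which matches the reshuffled use statements visible in several of the files below.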
@@ -66,8 +66,7 @@ impl Drop for CompactionFilterHandle {
     }
 }
 
-pub unsafe fn new_compaction_filter
-    (c_name: CString,
+pub unsafe fn new_compaction_filter(c_name: CString,
                                     ignore_snapshots: bool,
                                     f: Box<CompactionFilter>)
                                     -> Result<CompactionFilterHandle, String> {
@@ -75,8 +74,7 @@ pub unsafe fn new_compaction_filter
         name: c_name,
         filter: f,
     }));
-    let filter =
-        rocksdb_ffi::rocksdb_compactionfilter_create(proxy as *mut c_void,
+    let filter = rocksdb_ffi::rocksdb_compactionfilter_create(proxy as *mut c_void,
                                                      destructor,
                                                      filter,
                                                      name);
...
@@ -12,6 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 //
 use libc::{c_char, c_int, c_void, size_t};
 use std::ffi::CString;
 use std::mem;
@@ -29,8 +30,7 @@ pub extern "C" fn destructor_callback(raw_cb: *mut c_void) {
 
 pub extern "C" fn name_callback(raw_cb: *mut c_void) -> *const c_char {
     unsafe {
-        let cb: &mut ComparatorCallback =
-            &mut *(raw_cb as *mut ComparatorCallback);
+        let cb: &mut ComparatorCallback = &mut *(raw_cb as *mut ComparatorCallback);
         let ptr = cb.name.as_ptr();
         ptr as *const c_char
     }
@@ -43,12 +43,9 @@ pub extern "C" fn compare_callback(raw_cb: *mut c_void,
                                    b_len: size_t)
                                    -> c_int {
     unsafe {
-        let cb: &mut ComparatorCallback =
-            &mut *(raw_cb as *mut ComparatorCallback);
-        let a: &[u8] = slice::from_raw_parts(a_raw as *const u8,
-                                             a_len as usize);
-        let b: &[u8] = slice::from_raw_parts(b_raw as *const u8,
-                                             b_len as usize);
+        let cb: &mut ComparatorCallback = &mut *(raw_cb as *mut ComparatorCallback);
+        let a: &[u8] = slice::from_raw_parts(a_raw as *const u8, a_len as usize);
+        let b: &[u8] = slice::from_raw_parts(b_raw as *const u8, b_len as usize);
         (cb.f)(a, b)
     }
 }
@@ -27,10 +27,8 @@ pub mod comparator;
 mod compaction_filter;
 
 pub use compaction_filter::CompactionFilter;
-pub use librocksdb_sys::{DBCompactionStyle, DBCompressionType, DBRecoveryMode,
-                         new_bloom_filter, self as rocksdb_ffi};
+pub use librocksdb_sys::{DBCompactionStyle, DBCompressionType, DBRecoveryMode, new_bloom_filter,
+                         self as rocksdb_ffi};
 pub use merge_operator::MergeOperands;
-pub use rocksdb::{DB, DBIterator, DBVector, Kv, SeekKey, Writable, WriteBatch,
-                  CFHandle, Range};
-pub use rocksdb_options::{BlockBasedOptions, Options, ReadOptions,
-                          WriteOptions};
+pub use rocksdb::{DB, DBIterator, DBVector, Kv, SeekKey, Writable, WriteBatch, CFHandle, Range};
+pub use rocksdb_options::{BlockBasedOptions, Options, ReadOptions, WriteOptions};
@@ -66,10 +66,7 @@ fn main() {
     custom_merge();
 }
 
-fn concat_merge(_: &[u8],
-                existing_val: Option<&[u8]>,
-                operands: &mut MergeOperands)
-                -> Vec<u8> {
+fn concat_merge(_: &[u8], existing_val: Option<&[u8]>, operands: &mut MergeOperands) -> Vec<u8> {
     let mut result: Vec<u8> = Vec::with_capacity(operands.size_hint().0);
     match existing_val {
         Some(v) => {
@@ -152,8 +149,7 @@ mod tests {
                      opts: &mut Options,
                      blockopts: &mut BlockBasedOptions)
                      -> DB {
-        let per_level_compression: [DBCompressionType; 7] =
-            [DBCompressionType::DBNo,
+        let per_level_compression: [DBCompressionType; 7] = [DBCompressionType::DBNo,
                                                              DBCompressionType::DBNo,
                                                              DBCompressionType::DBNo,
                                                              DBCompressionType::DBLz4,
...
@@ -12,6 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 //
 use libc::{self, c_char, c_int, c_void, size_t};
 use std::ffi::CString;
 use std::mem;
@@ -34,8 +35,7 @@ pub extern "C" fn destructor_callback(raw_cb: *mut c_void) {
 
 pub extern "C" fn name_callback(raw_cb: *mut c_void) -> *const c_char {
     unsafe {
-        let cb: &mut MergeOperatorCallback =
-            &mut *(raw_cb as *mut MergeOperatorCallback);
+        let cb: &mut MergeOperatorCallback = &mut *(raw_cb as *mut MergeOperatorCallback);
         let ptr = cb.name.as_ptr();
         ptr as *const c_char
     }
@@ -53,13 +53,9 @@ pub extern "C" fn full_merge_callback(raw_cb: *mut c_void,
                                       new_value_length: *mut size_t)
                                       -> *const c_char {
     unsafe {
-        let cb: &mut MergeOperatorCallback =
-            &mut *(raw_cb as *mut MergeOperatorCallback);
-        let operands = &mut MergeOperands::new(operands_list,
-                                               operands_list_len,
-                                               num_operands);
-        let key: &[u8] = slice::from_raw_parts(raw_key as *const u8,
-                                               key_len as usize);
+        let cb: &mut MergeOperatorCallback = &mut *(raw_cb as *mut MergeOperatorCallback);
+        let operands = &mut MergeOperands::new(operands_list, operands_list_len, num_operands);
+        let key: &[u8] = slice::from_raw_parts(raw_key as *const u8, key_len as usize);
         let oldval: &[u8] = slice::from_raw_parts(existing_value as *const u8,
                                                   existing_value_len as usize);
         let mut result = (cb.merge_fn)(key, Some(oldval), operands);
@@ -84,13 +80,9 @@ pub extern "C" fn partial_merge_callback(raw_cb: *mut c_void,
                                          new_value_length: *mut size_t)
                                          -> *const c_char {
     unsafe {
-        let cb: &mut MergeOperatorCallback =
-            &mut *(raw_cb as *mut MergeOperatorCallback);
-        let operands = &mut MergeOperands::new(operands_list,
-                                               operands_list_len,
-                                               num_operands);
-        let key: &[u8] = slice::from_raw_parts(raw_key as *const u8,
-                                               key_len as usize);
+        let cb: &mut MergeOperatorCallback = &mut *(raw_cb as *mut MergeOperatorCallback);
+        let operands = &mut MergeOperands::new(operands_list, operands_list_len, num_operands);
+        let key: &[u8] = slice::from_raw_parts(raw_key as *const u8, key_len as usize);
         let mut result = (cb.merge_fn)(key, None, operands);
         result.shrink_to_fit();
         // TODO(tan) investigate zero-copy techniques to improve performance
@@ -137,13 +129,12 @@ impl<'a> Iterator for &'a mut MergeOperands {
             let base_len = self.operands_list_len as usize;
             let spacing = mem::size_of::<*const *const u8>();
             let spacing_len = mem::size_of::<*const size_t>();
-            let len_ptr =
-                (base_len + (spacing_len * self.cursor)) as *const size_t;
+            let len_ptr = (base_len + (spacing_len * self.cursor)) as *const size_t;
             let len = *len_ptr as usize;
             let ptr = base + (spacing * self.cursor);
             self.cursor += 1;
-            Some(mem::transmute(slice::from_raw_parts(*(ptr as *const *const u8)
-                                                          as *const u8, len)))
+            Some(mem::transmute(slice::from_raw_parts(*(ptr as *const *const u8) as *const u8,
+                                                      len)))
         }
     }
 }
@@ -156,9 +147,9 @@ impl<'a> Iterator for &'a mut MergeOperands {
 
 #[cfg(test)]
 mod test {
-    use super::*;
-    use rocksdb_options::Options;
     use rocksdb::{DB, DBVector, Writable};
+    use rocksdb_options::Options;
+    use super::*;
     use tempdir::TempDir;
 
     #[allow(unused_variables)]
...
@@ -12,6 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 //
 use rocksdb::{DB, MergeOperands, Options, Writable};
 use tempdir::TempDir;
@@ -114,11 +115,7 @@ pub fn test_column_family() {
     }
     // should b able to drop a cf
     {
-        let mut db = DB::open_cf(&Options::new(),
-                                 path_str,
-                                 &["cf1"],
-                                 &[&Options::new()])
-            .unwrap();
+        let mut db = DB::open_cf(&Options::new(), path_str, &["cf1"], &[&Options::new()]).unwrap();
         match db.drop_cf("cf1") {
             Ok(_) => println!("cf1 successfully dropped."),
             Err(e) => panic!("failed to drop column family: {}", e),
...
-use tempdir::TempDir;
 use rocksdb::{DB, Options, Range, Writable};
+use tempdir::TempDir;
 
 #[test]
...
-use tempdir::TempDir;
-use std::sync::{Arc, RwLock};
-use std::sync::atomic::{AtomicBool, Ordering};
 use rocksdb::{Writable, DB, CompactionFilter, Options};
+use std::sync::{Arc, RwLock};
+use std::sync::atomic::{AtomicBool, Ordering};
+use tempdir::TempDir;
 
 struct Filter {
     drop_called: Arc<AtomicBool>,
@@ -29,10 +29,13 @@ fn test_compaction_filter() {
     let drop_called = Arc::new(AtomicBool::new(false));
     let filtered_kvs = Arc::new(RwLock::new(vec![]));
     // set ignore_snapshots to false
-    opts.set_compaction_filter("test", false, Box::new(Filter {
-        drop_called: drop_called.clone(),
-        filtered_kvs: filtered_kvs.clone(),
-    })).unwrap();
+    opts.set_compaction_filter("test",
+                               false,
+                               Box::new(Filter {
+                                   drop_called: drop_called.clone(),
+                                   filtered_kvs: filtered_kvs.clone(),
+                               }))
+        .unwrap();
     opts.create_if_missing(true);
     let db = DB::open(&opts, path.path().to_str().unwrap()).unwrap();
     let samples = vec![
@@ -56,10 +59,13 @@ fn test_compaction_filter() {
     drop(db);
     // reregister with ignore_snapshots set to true
-    opts.set_compaction_filter("test", true, Box::new(Filter {
-        drop_called: drop_called.clone(),
-        filtered_kvs: filtered_kvs.clone(),
-    })).unwrap();
+    opts.set_compaction_filter("test",
+                               true,
+                               Box::new(Filter {
+                                   drop_called: drop_called.clone(),
+                                   filtered_kvs: filtered_kvs.clone(),
+                               }))
+        .unwrap();
     assert!(drop_called.load(Ordering::Relaxed));
     drop_called.store(false, Ordering::Relaxed);
     {
...
@@ -29,9 +29,8 @@ pub fn test_iterator() {
     assert!(p.is_ok());
     let p = db.put(k3, v3);
     assert!(p.is_ok());
-    let expected = vec![(k1.to_vec(), v1.to_vec()),
-                        (k2.to_vec(), v2.to_vec()),
-                        (k3.to_vec(), v3.to_vec())];
+    let expected =
+        vec![(k1.to_vec(), v1.to_vec()), (k2.to_vec(), v2.to_vec()), (k3.to_vec(), v3.to_vec())];
     let mut iter = db.iter();
@@ -75,9 +74,8 @@ pub fn test_iterator() {
     assert_eq!(iter.collect::<Vec<_>>(), expected2);
 
     iter.seek(SeekKey::Key(k2));
-    let expected = vec![(k2.to_vec(), v2.to_vec()),
-                        (k3.to_vec(), v3.to_vec()),
-                        (k4.to_vec(), v4.to_vec())];
+    let expected =
+        vec![(k2.to_vec(), v2.to_vec()), (k3.to_vec(), v3.to_vec()), (k4.to_vec(), v4.to_vec())];
     assert_eq!(iter.collect::<Vec<_>>(), expected);
 
     iter.seek(SeekKey::Key(k2));
...
 use rocksdb::{DB, Writable};
-use std::thread;
 use std::sync::Arc;
+use std::thread;
 use tempdir::TempDir;
 
 const N: usize = 100_000;
...
-use tempdir::TempDir;
 use rocksdb::{DB, Options};
+use tempdir::TempDir;
 
 #[test]
...