Commit dab2ef77 authored by follitude, committed by siddontang

*: format code (#121)

parent 4fd63913
# Complete list of style options can be found at:
# http://clang.llvm.org/docs/ClangFormatStyleOptions.html
---
BasedOnStyle: Google
...
@@ -12,13 +12,8 @@ cache:
 os:
   - linux
-  # - osx
-before_script:
-  - make -f travis-build/Makefile prepare-rustfmt
 script:
-  - cargo fmt -- --write-mode diff || (echo please make format and run tests before creating a pr!; exit 1)
   - cargo build --features static-link
   - cargo test --all --features static-link
@@ -26,4 +21,3 @@ env:
   global:
     - RUST_TEST_THREADS=1
     - LD_LIBRARY_PATH: "/usr/local/lib"
-    - RUSTFMT_VERSION=v0.6.0
@@ -8,14 +8,13 @@ machine:
     CPPFLAGS: "-I$HOME/.local/include"
     CXXFLAGS: "-I$HOME/.local/include"
     PKG_CONFIG_PATH: "$PKG_CONFIG_PATH:$HOME/.local/lib/pkgconfig"
-    RUSTC_DATE: "2017-03-28"
+    RUSTC_DATE: "2017-08-09"
     LOCAL_PREFIX: "$HOME/.local"
     # used by cargo
     LIBRARY_PATH: "$LIBRARY_PATH:$HOME/.local/lib"
     RUST_TEST_THREADS: 1
     RUST_BACKTRACE: 1
     RUSTFLAGS: "-Dwarnings"
-    RUSTFMT_VERSION: "v0.6.0"
   pre:
     - |
       sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y;
...
 extern crate gcc;
+use gcc::Build;
 use std::{env, fs, str};
 use std::path::PathBuf;
 use std::process::Command;
-use gcc::Config;

 macro_rules! t {
     ($e:expr) => (match $e {
         Ok(n) => n,
@@ -26,8 +25,8 @@ fn main() {
     println!("cargo:rustc-link-lib=static=crocksdb");
 }

-fn build_rocksdb() -> Config {
-    let mut cfg = Config::new();
+fn build_rocksdb() -> Build {
+    let mut cfg = Build::new();
     if !cfg!(feature = "static-link") {
         if cfg!(target_os = "windows") {
@@ -89,10 +88,12 @@ fn build_rocksdb() -> Config {
         }
         if let Err(e) = fs::rename(src.as_path(), dst.as_path()) {
-            panic!("failed to move {} to {}: {:?}",
-                   src.display(),
-                   dst.display(),
-                   e);
+            panic!(
+                "failed to move {} to {}: {:?}",
+                src.display(),
+                dst.display(),
+                e
+            );
         }
     }
@@ -124,23 +125,29 @@ fn build_rocksdb() -> Config {
             return cfg;
         }
-        let output =
-            Command::new(p.as_path()).args(&["find_library", std_lib_name]).output().unwrap();
+        let output = Command::new(p.as_path())
+            .args(&["find_library", std_lib_name])
+            .output()
+            .unwrap();
         if output.status.success() && !output.stdout.is_empty() {
             if let Ok(path_str) = str::from_utf8(&output.stdout) {
                 let path = PathBuf::from(path_str);
                 if path.is_absolute() {
                     println!("cargo:rustc-link-lib=static=stdc++");
-                    println!("cargo:rustc-link-search=native={}",
-                             path.parent().unwrap().display());
+                    println!(
+                        "cargo:rustc-link-search=native={}",
+                        path.parent().unwrap().display()
+                    );
                     cfg.cpp_link_stdlib(None);
                     return cfg;
                 }
             }
         }
-        println!("failed to detect {}: {:?}, fallback to dynamic",
-                 std_lib_name,
-                 output);
+        println!(
+            "failed to detect {}: {:?}, fallback to dynamic",
+            std_lib_name,
+            output
+        );
     cfg
 }
...
@@ -628,7 +628,8 @@ crocksdb_compactionjobinfo_table_properties(
 extern C_ROCKSDB_LIBRARY_API uint64_t
 crocksdb_compactionjobinfo_elapsed_micros(const crocksdb_compactionjobinfo_t*);
 extern C_ROCKSDB_LIBRARY_API uint64_t
-crocksdb_compactionjobinfo_num_corrupt_keys(const crocksdb_compactionjobinfo_t*);
+crocksdb_compactionjobinfo_num_corrupt_keys(
+    const crocksdb_compactionjobinfo_t*);

 /* External file ingestion info */
...
This diff is collapsed.
 use crocksdb_ffi::{self, DBCompactionFilter};
-use libc::{c_void, c_char, c_int, size_t};
+use libc::{c_char, c_int, c_void, size_t};
 use std::ffi::CString;
 use std::slice;
@@ -34,7 +34,8 @@ extern "C" fn destructor(filter: *mut c_void) {
     }
 }

-extern "C" fn filter(filter: *mut c_void,
-                     level: c_int,
-                     key: *const u8,
-                     key_len: size_t,
+extern "C" fn filter(
+    filter: *mut c_void,
+    level: c_int,
+    key: *const u8,
+    key_len: size_t,
@@ -42,8 +43,8 @@ extern "C" fn filter(filter: *mut c_void,
     value_len: size_t,
     _: *mut *mut u8,
     _: *mut size_t,
-    value_changed: *mut bool)
-    -> bool {
+    value_changed: *mut bool,
+) -> bool {
     unsafe {
         let filter = &mut *(filter as *mut CompactionFilterProxy);
         let key = slice::from_raw_parts(key, key_len);
@@ -65,18 +66,21 @@ impl Drop for CompactionFilterHandle {
     }
 }

-pub unsafe fn new_compaction_filter(c_name: CString,
-                                    ignore_snapshots: bool,
-                                    f: Box<CompactionFilter>)
-                                    -> Result<CompactionFilterHandle, String> {
+pub unsafe fn new_compaction_filter(
+    c_name: CString,
+    ignore_snapshots: bool,
+    f: Box<CompactionFilter>,
+) -> Result<CompactionFilterHandle, String> {
     let proxy = Box::into_raw(Box::new(CompactionFilterProxy {
         name: c_name,
         filter: f,
     }));
-    let filter = crocksdb_ffi::crocksdb_compactionfilter_create(proxy as *mut c_void,
-                                                                destructor,
-                                                                filter,
-                                                                name);
+    let filter = crocksdb_ffi::crocksdb_compactionfilter_create(
+        proxy as *mut c_void,
+        destructor,
+        filter,
+        name,
+    );
     crocksdb_ffi::crocksdb_compactionfilter_set_ignore_snapshots(filter, ignore_snapshots);
     Ok(CompactionFilterHandle { inner: filter })
 }
@@ -36,12 +36,13 @@ pub extern "C" fn name_callback(raw_cb: *mut c_void) -> *const c_char {
     }
 }

-pub extern "C" fn compare_callback(raw_cb: *mut c_void,
-                                   a_raw: *const c_char,
-                                   a_len: size_t,
-                                   b_raw: *const c_char,
-                                   b_len: size_t)
-                                   -> c_int {
+pub extern "C" fn compare_callback(
+    raw_cb: *mut c_void,
+    a_raw: *const c_char,
+    a_len: size_t,
+    b_raw: *const c_char,
+    b_len: size_t,
+) -> c_int {
     unsafe {
         let cb: &mut ComparatorCallback = &mut *(raw_cb as *mut ComparatorCallback);
         let a: &[u8] = slice::from_raw_parts(a_raw as *const u8, a_len as usize);
...
@@ -13,10 +13,10 @@

 use {TableProperties, TablePropertiesCollectionView};
-use crocksdb_ffi::{self, DBInstance, DBFlushJobInfo, DBCompactionJobInfo, DBIngestionInfo,
-                   DBEventListener};
+use crocksdb_ffi::{self, DBCompactionJobInfo, DBEventListener, DBFlushJobInfo, DBIngestionInfo,
+                   DBInstance};
 use libc::c_void;
-use std::{slice, mem, str};
+use std::{mem, slice, str};
 use std::path::Path;
@@ -98,8 +98,11 @@ impl IngestionInfo {
     }

     pub fn internal_file_path(&self) -> &Path {
-        let p =
-            unsafe { fetch_str!(crocksdb_externalfileingestioninfo_internal_file_path(&self.0)) };
+        let p = unsafe {
+            fetch_str!(crocksdb_externalfileingestioninfo_internal_file_path(
+                &self.0
+            ))
+        };
         Path::new(p)
     }
@@ -134,23 +137,29 @@ extern "C" fn destructor(ctx: *mut c_void) {
 // Maybe we should reuse db instance?
 // TODO: refactor DB implement so that we can convert DBInstance to DB.
-extern "C" fn on_flush_completed(ctx: *mut c_void,
-                                 _: *mut DBInstance,
-                                 info: *const DBFlushJobInfo) {
+extern "C" fn on_flush_completed(
+    ctx: *mut c_void,
+    _: *mut DBInstance,
+    info: *const DBFlushJobInfo,
+) {
     let (ctx, info) = unsafe { (&*(ctx as *mut Box<EventListener>), mem::transmute(&*info)) };
     ctx.on_flush_completed(info);
 }

-extern "C" fn on_compaction_completed(ctx: *mut c_void,
-                                      _: *mut DBInstance,
-                                      info: *const DBCompactionJobInfo) {
+extern "C" fn on_compaction_completed(
+    ctx: *mut c_void,
+    _: *mut DBInstance,
+    info: *const DBCompactionJobInfo,
+) {
     let (ctx, info) = unsafe { (&*(ctx as *mut Box<EventListener>), mem::transmute(&*info)) };
     ctx.on_compaction_completed(info);
 }

-extern "C" fn on_external_file_ingested(ctx: *mut c_void,
-                                        _: *mut DBInstance,
-                                        info: *const DBIngestionInfo) {
+extern "C" fn on_external_file_ingested(
+    ctx: *mut c_void,
+    _: *mut DBInstance,
+    info: *const DBIngestionInfo,
+) {
     let (ctx, info) = unsafe { (&*(ctx as *mut Box<EventListener>), mem::transmute(&*info)) };
     ctx.on_external_file_ingested(info);
 }
@@ -158,10 +167,12 @@ extern "C" fn on_external_file_ingested(ctx: *mut c_void,
 pub fn new_event_listener<L: EventListener>(l: L) -> *mut DBEventListener {
     let p: Box<EventListener> = Box::new(l);
     unsafe {
-        crocksdb_ffi::crocksdb_eventlistener_create(Box::into_raw(Box::new(p)) as *mut c_void,
-                                                    destructor,
-                                                    on_flush_completed,
-                                                    on_compaction_completed,
-                                                    on_external_file_ingested)
+        crocksdb_ffi::crocksdb_eventlistener_create(
+            Box::into_raw(Box::new(p)) as *mut c_void,
+            destructor,
+            on_flush_completed,
+            on_compaction_completed,
+            on_external_file_ingested,
+        )
     }
 }
@@ -32,16 +32,16 @@ mod table_properties_collector_factory;
 mod event_listener;

 pub use compaction_filter::CompactionFilter;
-pub use event_listener::{EventListener, CompactionJobInfo, IngestionInfo, FlushJobInfo};
-pub use librocksdb_sys::{DBCompactionStyle, DBCompressionType, DBRecoveryMode, DBInfoLogLevel,
-                         DBStatisticsTickerType, DBStatisticsHistogramType, new_bloom_filter,
-                         CompactionPriority, DBEntryType, self as crocksdb_ffi};
+pub use event_listener::{CompactionJobInfo, EventListener, FlushJobInfo, IngestionInfo};
+pub use librocksdb_sys::{self as crocksdb_ffi, new_bloom_filter, CompactionPriority,
+                         DBCompactionStyle, DBCompressionType, DBEntryType, DBInfoLogLevel,
+                         DBRecoveryMode, DBStatisticsHistogramType, DBStatisticsTickerType};
 pub use merge_operator::MergeOperands;
-pub use rocksdb::{DB, DBIterator, DBVector, Kv, SeekKey, Writable, WriteBatch, CFHandle, Range,
-                  BackupEngine, SstFileWriter};
-pub use rocksdb_options::{BlockBasedOptions, DBOptions, ColumnFamilyOptions, ReadOptions,
-                          WriteOptions, RestoreOptions, IngestExternalFileOptions, EnvOptions,
-                          HistogramData, CompactOptions};
+pub use rocksdb::{BackupEngine, CFHandle, DBIterator, DBVector, Kv, Range, SeekKey, SstFileWriter,
+                  Writable, WriteBatch, DB};
+pub use rocksdb_options::{BlockBasedOptions, ColumnFamilyOptions, CompactOptions, DBOptions,
+                          EnvOptions, HistogramData, IngestExternalFileOptions, ReadOptions,
+                          RestoreOptions, WriteOptions};
 pub use slice_transform::SliceTransform;
 pub use table_properties::{TableProperties, TablePropertiesCollection,
                            TablePropertiesCollectionView, UserCollectedProperties};
...
@@ -14,7 +14,7 @@
 //

 extern crate rocksdb;

-use rocksdb::{DB, MergeOperands, DBOptions, Writable, ColumnFamilyOptions};
+use rocksdb::{ColumnFamilyOptions, DBOptions, MergeOperands, Writable, DB};

 // fn snapshot_test() {
 //     let path = "_rust_rocksdb_iteratortest";
@@ -52,12 +52,10 @@ fn main() {
     let db = DB::open_default(path).unwrap();
     assert!(db.put(b"my key", b"my value").is_ok());
     match db.get(b"my key") {
-        Ok(Some(value)) => {
-            match value.to_utf8() {
-                Some(v) => println!("retrieved utf8 value: {}", v),
-                None => println!("did not read valid utf-8 out of the db"),
-            }
-        }
+        Ok(Some(value)) => match value.to_utf8() {
+            Some(v) => println!("retrieved utf8 value: {}", v),
+            None => println!("did not read valid utf-8 out of the db"),
+        },
         Ok(None) => panic!("value not present!"),
         Err(e) => println!("error retrieving value: {}", e),
     }
@@ -70,11 +68,9 @@ fn main() {
 fn concat_merge(_: &[u8], existing_val: Option<&[u8]>, operands: &mut MergeOperands) -> Vec<u8> {
     let mut result: Vec<u8> = Vec::with_capacity(operands.size_hint().0);
     match existing_val {
-        Some(v) => {
-            for e in v {
-                result.push(*e)
-            }
-        }
+        Some(v) => for e in v {
+            result.push(*e)
+        },
         None => (),
     }
     for op in operands {
@@ -100,12 +96,10 @@ fn custom_merge() {
     db.merge(b"k1", b"efg").unwrap();
     db.merge(b"k1", b"h").unwrap();
     match db.get(b"k1") {
-        Ok(Some(value)) => {
-            match value.to_utf8() {
-                Some(v) => println!("retrieved utf8 value: {}", v),
-                None => println!("did not read valid utf-8 out of the db"),
-            }
-        }
+        Ok(Some(value)) => match value.to_utf8() {
+            Some(v) => println!("retrieved utf8 value: {}", v),
+            None => println!("did not read valid utf-8 out of the db"),
+        },
         Ok(None) => panic!("value not present!"),
         Err(e) => println!("error retrieving value: {}", e),
     }
@@ -116,22 +110,25 @@ fn custom_merge() {

 #[cfg(test)]
 mod tests {
-    use rocksdb::{BlockBasedOptions, DB, DBCompressionType, ColumnFamilyOptions, DBOptions};
+    use rocksdb::{BlockBasedOptions, ColumnFamilyOptions, DBCompressionType, DBOptions, DB};
     use rocksdb::DBCompactionStyle;
     use rocksdb::DBRecoveryMode;

     #[allow(dead_code)]
-    fn tuned_for_somebody_elses_disk(path: &str,
-                                     mut opts: DBOptions,
-                                     blockopts: &mut BlockBasedOptions)
-                                     -> DB {
-        let per_level_compression: [DBCompressionType; 7] = [DBCompressionType::No,
-                                                             DBCompressionType::No,
-                                                             DBCompressionType::No,
-                                                             DBCompressionType::Lz4,
-                                                             DBCompressionType::Lz4,
-                                                             DBCompressionType::Lz4,
-                                                             DBCompressionType::Lz4];
+    fn tuned_for_somebody_elses_disk(
+        path: &str,
+        mut opts: DBOptions,
+        blockopts: &mut BlockBasedOptions,
+    ) -> DB {
+        let per_level_compression: [DBCompressionType; 7] = [
+            DBCompressionType::No,
+            DBCompressionType::No,
+            DBCompressionType::No,
+            DBCompressionType::Lz4,
+            DBCompressionType::Lz4,
+            DBCompressionType::Lz4,
+            DBCompressionType::Lz4,
+        ];
         let mut cf_opts = ColumnFamilyOptions::new();
         opts.create_if_missing(true);
         opts.set_max_open_files(10000);
...
@@ -41,7 +41,8 @@ pub extern "C" fn name_callback(raw_cb: *mut c_void) -> *const c_char {
     }
 }

-pub extern "C" fn full_merge_callback(raw_cb: *mut c_void,
-                                      raw_key: *const c_char,
-                                      key_len: size_t,
-                                      existing_value: *const c_char,
+pub extern "C" fn full_merge_callback(
+    raw_cb: *mut c_void,
+    raw_key: *const c_char,
+    key_len: size_t,
+    existing_value: *const c_char,
@@ -50,14 +51,14 @@ pub extern "C" fn full_merge_callback(raw_cb: *mut c_void,
     operands_list_len: *const size_t,
     num_operands: c_int,
     success: *mut u8,
-    new_value_length: *mut size_t)
-    -> *const c_char {
+    new_value_length: *mut size_t,
+) -> *const c_char {
     unsafe {
         let cb: &mut MergeOperatorCallback = &mut *(raw_cb as *mut MergeOperatorCallback);
         let operands = &mut MergeOperands::new(operands_list, operands_list_len, num_operands);
         let key: &[u8] = slice::from_raw_parts(raw_key as *const u8, key_len as usize);
-        let oldval: &[u8] = slice::from_raw_parts(existing_value as *const u8,
-                                                  existing_value_len as usize);
+        let oldval: &[u8] =
+            slice::from_raw_parts(existing_value as *const u8, existing_value_len as usize);
         let mut result = (cb.merge_fn)(key, Some(oldval), operands);
         result.shrink_to_fit();
         // TODO(tan) investigate zero-copy techniques to improve performance
@@ -70,15 +71,16 @@ pub extern "C" fn full_merge_callback(raw_cb: *mut c_void,
     }
 }

-pub extern "C" fn partial_merge_callback(raw_cb: *mut c_void,
-                                         raw_key: *const c_char,
-                                         key_len: size_t,
-                                         operands_list: *const *const c_char,
-                                         operands_list_len: *const size_t,
-                                         num_operands: c_int,
-                                         success: *mut u8,
-                                         new_value_length: *mut size_t)
-                                         -> *const c_char {
+pub extern "C" fn partial_merge_callback(
+    raw_cb: *mut c_void,
+    raw_key: *const c_char,
+    key_len: size_t,
+    operands_list: *const *const c_char,
+    operands_list_len: *const size_t,
+    num_operands: c_int,
+    success: *mut u8,
+    new_value_length: *mut size_t,
+) -> *const c_char {
     unsafe {
         let cb: &mut MergeOperatorCallback = &mut *(raw_cb as *mut MergeOperatorCallback);
         let operands = &mut MergeOperands::new(operands_list, operands_list_len, num_operands);
@@ -104,10 +106,11 @@ pub struct MergeOperands {
 }

 impl MergeOperands {
-    fn new(operands_list: *const *const c_char,
-           operands_list_len: *const size_t,
-           num_operands: c_int)
-           -> MergeOperands {
+    fn new(
+        operands_list: *const *const c_char,
+        operands_list_len: *const size_t,
+        num_operands: c_int,
+    ) -> MergeOperands {
         assert!(num_operands >= 0);
         MergeOperands {
             operands_list: operands_list,
@@ -133,8 +136,10 @@ impl<'a> Iterator for &'a mut MergeOperands {
             let len = *len_ptr as usize;
             let ptr = base + (spacing * self.cursor);
             self.cursor += 1;
-            Some(mem::transmute(slice::from_raw_parts(*(ptr as *const *const u8) as *const u8,
-                                                      len)))
+            Some(mem::transmute(slice::from_raw_parts(
+                *(ptr as *const *const u8) as *const u8,
+                len,
+            )))
         }
     }
 }
@@ -147,17 +152,18 @@ impl<'a> Iterator for &'a mut MergeOperands {

 #[cfg(test)]
 mod test {
-    use rocksdb::{DB, DBVector, Writable};
-    use rocksdb_options::{DBOptions, ColumnFamilyOptions};
     use super::*;
+    use rocksdb::{DBVector, Writable, DB};
+    use rocksdb_options::{ColumnFamilyOptions, DBOptions};
     use tempdir::TempDir;

     #[allow(unused_variables)]
     #[allow(dead_code)]
-    fn test_provided_merge(new_key: &[u8],
-                           existing_val: Option<&[u8]>,
-                           operands: &mut MergeOperands)
-                           -> Vec<u8> {
+    fn test_provided_merge(
+        new_key: &[u8],
+        existing_val: Option<&[u8]>,
+        operands: &mut MergeOperands,
+    ) -> Vec<u8> {
         let nops = operands.size_hint().0;
         let mut result: Vec<u8> = Vec::with_capacity(nops);
         if let Some(v) = existing_val {
@@ -181,11 +187,12 @@ mod test {
         opts.create_if_missing(true);
         let mut cf_opts = ColumnFamilyOptions::new();
         cf_opts.add_merge_operator("test operator", test_provided_merge);
-        let db = DB::open_cf(opts,
-                             path.path().to_str().unwrap(),
-                             vec!["default"],
-                             vec![cf_opts])
-            .unwrap();
+        let db = DB::open_cf(
+            opts,
+            path.path().to_str().unwrap(),
+            vec!["default"],
+            vec![cf_opts],
+        ).unwrap();
         let p = db.put(b"k1", b"a");
         assert!(p.is_ok());
         let _ = db.merge(b"k1", b"b");
@@ -195,12 +202,10 @@ mod test {
         let m = db.merge(b"k1", b"h");
         assert!(m.is_ok());
         match db.get(b"k1") {
-            Ok(Some(value)) => {
-                match value.to_utf8() {
-                    Some(v) => println!("retrieved utf8 value: {}", v),
-                    None => println!("did not read valid utf-8 out of the db"),
-                }
-            }
+            Ok(Some(value)) => match value.to_utf8() {
+                Some(v) => println!("retrieved utf8 value: {}", v),
+                None => println!("did not read valid utf-8 out of the db"),
+            },
             Err(e) => println!("error reading value {:?}", e),
             _ => panic!("value not present"),
         }
...
This diff is collapsed.
This diff is collapsed.
@@ -13,7 +13,7 @@

 use crocksdb_ffi::{self, DBSliceTransform};
-use libc::{c_void, c_char, size_t};
+use libc::{c_char, c_void, size_t};
 use std::ffi::CString;
 use std::slice;
@@ -54,11 +54,12 @@ extern "C" fn destructor(transform: *mut c_void) {
     }
 }

-extern "C" fn transform(transform: *mut c_void,
-                        key: *const u8,
-                        key_len: size_t,
-                        dest_len: *mut size_t)
-                        -> *const u8 {
+extern "C" fn transform(
+    transform: *mut c_void,
+    key: *const u8,
+    key_len: size_t,
+    dest_len: *mut size_t,
+) -> *const u8 {
     unsafe {
         let transform = &mut *(transform as *mut SliceTransformProxy);
         let key = slice::from_raw_parts(key, key_len);
@@ -85,18 +86,21 @@ extern "C" fn in_range(transform: *mut c_void, key: *const u8, key_len: size_t)
     }
 }

-pub unsafe fn new_slice_transform(c_name: CString,
-                                  f: Box<SliceTransform>)
-                                  -> Result<*mut DBSliceTransform, String> {
+pub unsafe fn new_slice_transform(
+    c_name: CString,
+    f: Box<SliceTransform>,
+) -> Result<*mut DBSliceTransform, String> {
     let proxy = Box::into_raw(Box::new(SliceTransformProxy {
         name: c_name,
         transform: f,
     }));
-    let transform = crocksdb_ffi::crocksdb_slicetransform_create(proxy as *mut c_void,
-                                                                 destructor,
-                                                                 transform,
-                                                                 in_domain,
-                                                                 in_range,
-                                                                 name);
+    let transform = crocksdb_ffi::crocksdb_slicetransform_create(
+        proxy as *mut c_void,
+        destructor,
+        transform,
+        in_domain,
+        in_range,
+        name,
+    );
     Ok(transform)
 }
@@ -12,20 +12,21 @@
 // limitations under the License.

-use crocksdb_ffi::{self, DBTableProperties, DBTableProperty, DBUserCollectedPropertiesIterator,
-                   DBTablePropertiesCollection, DBTablePropertiesCollectionIterator,
-                   DBUserCollectedProperties};
+use crocksdb_ffi::{self, DBTableProperties, DBTablePropertiesCollection,
+                   DBTablePropertiesCollectionIterator, DBTableProperty,
+                   DBUserCollectedProperties, DBUserCollectedPropertiesIterator};
 use libc::size_t;
-use std::{slice, str, mem};
+use std::{mem, slice, str};
 use std::marker::PhantomData;
-use std::ops::{Index, Deref};
+use std::ops::{Deref, Index};

 pub struct TablePropertiesCollectionView(DBTablePropertiesCollection);

 impl TablePropertiesCollectionView {
-    pub unsafe fn from_ptr<'a>(collection: *const DBTablePropertiesCollection)
-                               -> &'a TablePropertiesCollectionView {
+    pub unsafe fn from_ptr<'a>(
+        collection: *const DBTablePropertiesCollection,
+    ) -> &'a TablePropertiesCollectionView {
         let c = &*collection;
         mem::transmute(c)
     }
@@ -86,8 +87,8 @@ impl<'a> Iterator for TablePropertiesCollectionIter<'a> {
             }

             let mut klen: size_t = 0;
-            let k = crocksdb_ffi::crocksdb_table_properties_collection_iter_key(self.inner,
-                                                                                &mut klen);
+            let k =
+                crocksdb_ffi::crocksdb_table_properties_collection_iter_key(self.inner, &mut klen);
             let bytes = slice::from_raw_parts(k, klen);
             let key = str::from_utf8(bytes).unwrap();
             let props = crocksdb_ffi::crocksdb_table_properties_collection_iter_value(self.inner);
@@ -237,10 +238,12 @@ impl UserCollectedProperties {
         let bytes = index.as_ref();
         let mut size = 0;
         unsafe {
-            let ptr = crocksdb_ffi::crocksdb_user_collected_properties_get(&self.inner,
-                                                                           bytes.as_ptr(),
-                                                                           bytes.len(),
-                                                                           &mut size);
+            let ptr = crocksdb_ffi::crocksdb_user_collected_properties_get(
+                &self.inner,
+                bytes.as_ptr(),
+                bytes.len(),
+                &mut size,
+            );
             if ptr.is_null() {
                 return None;
             }
@@ -262,7 +265,8 @@ impl<Q: AsRef<[u8]>> Index<Q> for UserCollectedProperties {
     fn index(&self, index: Q) -> &[u8] {
         let key = index.as_ref();
-        self.get(key).unwrap_or_else(|| panic!("no entry found for key {:?}", key))
+        self.get(key)
+            .unwrap_or_else(|| panic!("no entry found for key {:?}", key))
     }
 }
@@ -308,13 +312,13 @@ impl<'a> Iterator for UserCollectedPropertiesIter<'a> {
                 return None;
             }
             let mut klen: size_t = 0;
-            let k = crocksdb_ffi::crocksdb_user_collected_properties_iter_key(self.inner,
-                                                                              &mut klen);
+            let k =
+                crocksdb_ffi::crocksdb_user_collected_properties_iter_key(self.inner, &mut klen);
             let key = slice::from_raw_parts(k, klen);
             let mut vlen: size_t = 0;
-            let v = crocksdb_ffi::crocksdb_user_collected_properties_iter_value(self.inner,
-                                                                                &mut vlen);
+            let v =
+                crocksdb_ffi::crocksdb_user_collected_properties_iter_value(self.inner, &mut vlen);
             let val = slice::from_raw_parts(v, vlen);
             crocksdb_ffi::crocksdb_user_collected_properties_iter_next(self.inner);
...
@@ -11,8 +11,8 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-use crocksdb_ffi::{self, DBEntryType, DBUserCollectedProperties, DBTablePropertiesCollector};
-use libc::{c_void, c_char, c_int, uint8_t, uint64_t, size_t};
+use crocksdb_ffi::{self, DBEntryType, DBTablePropertiesCollector, DBUserCollectedProperties};
+use libc::{c_char, c_int, c_void, size_t, uint64_t, uint8_t};
 use std::collections::HashMap;
 use std::ffi::CString;
 use std::mem;
@@ -26,12 +26,7 @@ use std::slice;
 /// TablePropertiesCollector object per table and then call it sequentially
 pub trait TablePropertiesCollector {
     /// Will be called when a new key/value pair is inserted into the table.
-    fn add(&mut self,
-           key: &[u8],
-           value: &[u8],
-           entry_type: DBEntryType,
-           seq: u64,
-           file_size: u64);
+    fn add(&mut self, key: &[u8], value: &[u8], entry_type: DBEntryType, seq: u64, file_size: u64);

     /// Will be called when a table has already been built and is ready for
     /// writing the properties block.
@@ -65,19 +60,23 @@ extern "C" fn destruct(handle: *mut c_void) {
     }
 }

-pub extern "C" fn add(handle: *mut c_void,
-                      key: *const uint8_t,
-                      key_len: size_t,
-                      value: *const uint8_t,
-                      value_len: size_t,
-                      entry_type: c_int,
-                      seq: uint64_t,
-                      file_size: uint64_t) {
+pub extern "C" fn add(
+    handle: *mut c_void,
+    key: *const uint8_t,
+    key_len: size_t,
+    value: *const uint8_t,
+    value_len: size_t,
+    entry_type: c_int,
+    seq: uint64_t,
+    file_size: uint64_t,
+) {
     unsafe {
         let handle = &mut *(handle as *mut TablePropertiesCollectorHandle);
         let key = slice::from_raw_parts(key, key_len);
         let value = slice::from_raw_parts(value, value_len);
-        handle.rep.add(key, value, mem::transmute(entry_type), seq, file_size);
+        handle
+            .rep
+            .add(key, value, mem::transmute(entry_type), seq, file_size);
     }
 }
@@ -85,18 +84,21 @@ pub extern "C" fn finish(handle: *mut c_void, props: *mut DBUserCollectedPropert
     unsafe {
         let handle = &mut *(handle as *mut TablePropertiesCollectorHandle);
         for (key, value) in handle.rep.finish() {
-            crocksdb_ffi::crocksdb_user_collected_properties_add(props,
-                                                                 key.as_ptr(),
-                                                                 key.len(),
-                                                                 value.as_ptr(),
-                                                                 value.len());
+            crocksdb_ffi::crocksdb_user_collected_properties_add(
+                props,
+                key.as_ptr(),
+                key.len(),
+                value.as_ptr(),
+                value.len(),
+            );
         }
     }
 }

-pub unsafe fn new_table_properties_collector(cname: &str,
-                                             collector: Box<TablePropertiesCollector>)
-                                             -> *mut DBTablePropertiesCollector {
+pub unsafe fn new_table_properties_collector(
+    cname: &str,
+    collector: Box<TablePropertiesCollector>,
+) -> *mut DBTablePropertiesCollector {
     let handle = TablePropertiesCollectorHandle::new(cname, collector);
     crocksdb_ffi::crocksdb_table_properties_collector_create(
         Box::into_raw(Box::new(handle)) as *mut c_void,
...
@@ -12,9 +12,9 @@
 // limitations under the License.

 use crocksdb_ffi::{self, DBTablePropertiesCollector, DBTablePropertiesCollectorFactory};
-use libc::{c_void, c_char, uint32_t};
+use libc::{c_char, c_void, uint32_t};
 use std::ffi::CString;
-use table_properties_collector::{TablePropertiesCollector, new_table_properties_collector};
+use table_properties_collector::{new_table_properties_collector, TablePropertiesCollector};

 /// Constructs `TablePropertiesCollector`.
 /// Internals create a new `TablePropertiesCollector` for each new table.
@@ -29,9 +29,10 @@ struct TablePropertiesCollectorFactoryHandle {
 }

 impl TablePropertiesCollectorFactoryHandle {
-    fn new(name: &str,
-           rep: Box<TablePropertiesCollectorFactory>)
-           -> TablePropertiesCollectorFactoryHandle {
+    fn new(
+        name: &str,
+        rep: Box<TablePropertiesCollectorFactory>,
+    ) -> TablePropertiesCollectorFactoryHandle {
         TablePropertiesCollectorFactoryHandle {
             name: CString::new(name).unwrap(),
             rep: rep,
@@ -52,9 +53,10 @@ extern "C" fn destruct(handle: *mut c_void) {
     }
 }

-extern "C" fn create_table_properties_collector(handle: *mut c_void,
-                                                cf: uint32_t)
-                                                -> *mut DBTablePropertiesCollector {
+extern "C" fn create_table_properties_collector(
+    handle: *mut c_void,
+    cf: uint32_t,
+) -> *mut DBTablePropertiesCollector {
     unsafe {
         let handle = &mut *(handle as *mut TablePropertiesCollectorFactoryHandle);
         let collector = handle.rep.create_table_properties_collector(cf);
@@ -62,9 +64,10 @@ extern "C" fn create_table_properties_collector(handle: *mut c_void,
     }
 }

-pub unsafe fn new_table_properties_collector_factory
-    (fname: &str, factory: Box<TablePropertiesCollectorFactory>)
-     -> *mut DBTablePropertiesCollectorFactory {
+pub unsafe fn new_table_properties_collector_factory(
+    fname: &str,
+    factory: Box<TablePropertiesCollectorFactory>,
+) -> *mut DBTablePropertiesCollectorFactory {
     let handle = TablePropertiesCollectorFactoryHandle::new(fname, factory);
     crocksdb_ffi::crocksdb_table_properties_collector_factory_create(
         Box::into_raw(Box::new(handle)) as *mut c_void,
...
@@ -13,7 +13,7 @@
 // limitations under the License.
 //

-use rocksdb::{DB, MergeOperands, DBOptions, ColumnFamilyOptions, Writable};
+use rocksdb::{ColumnFamilyOptions, DBOptions, MergeOperands, Writable, DB};
 use tempdir::TempDir;

 #[test]
@@ -43,15 +43,15 @@ pub fn test_column_family() {
         let mut cf_opts = ColumnFamilyOptions::new();
         cf_opts.add_merge_operator("test operator", test_provided_merge);
         match DB::open_cf(DBOptions::new(), path_str, vec!["default"], vec![cf_opts]) {
-            Ok(_) => {
-                panic!("should not have opened DB successfully without \
-                        specifying column
-families")
-            }
-            Err(e) => {
-                assert!(e.starts_with("Invalid argument: You have to open \
-                                       all column families."))
-            }
+            Ok(_) => panic!(
+                "should not have opened DB successfully without \
+                 specifying column
+families"
+            ),
+            Err(e) => assert!(e.starts_with(
+                "Invalid argument: You have to open \
+                 all column families."
+            )),
         }
     }
@@ -77,11 +77,7 @@ pub fn test_column_family() {
     };
     let cf1 = db.cf_handle("cf1").unwrap();
     assert!(db.put_cf(cf1, b"k1", b"v1").is_ok());
-    assert!(db.get_cf(cf1, b"k1")
-        .unwrap()
-        .unwrap()
-        .to_utf8()
-        .unwrap() == "v1");
+    assert!(db.get_cf(cf1, b"k1").unwrap().unwrap().to_utf8().unwrap() == "v1");
     let p = db.put_cf(cf1, b"k1", b"a");
     assert!(p.is_ok());
     /*
@@ -117,11 +113,12 @@ pub fn test_column_family() {
     {}
     // should be able to drop a cf
     {
-        let mut db = DB::open_cf(DBOptions::new(),
-                                 path_str,
-                                 vec!["cf1"],
-                                 vec![ColumnFamilyOptions::new()])
-            .unwrap();
+        let mut db = DB::open_cf(
+            DBOptions::new(),
+            path_str,
+            vec!["cf1"],
+            vec![ColumnFamilyOptions::new()],
+        ).unwrap();
         match db.drop_cf("cf1") {
             Ok(_) => println!("cf1 successfully dropped."),
             Err(e) => panic!("failed to drop column family: {}", e),
@@ -129,18 +126,17 @@ pub fn test_column_family() {
         }
     }
 }

-fn test_provided_merge(_: &[u8],
-                       existing_val: Option<&[u8]>,
-                       operands: &mut MergeOperands)
-                       -> Vec<u8> {
+fn test_provided_merge(
+    _: &[u8],
+    existing_val: Option<&[u8]>,
+    operands: &mut MergeOperands,
+) -> Vec<u8> {
     let nops = operands.size_hint().0;
     let mut result: Vec<u8> = Vec::with_capacity(nops);
     match existing_val {
-        Some(v) => {
-            for e in v {
-                result.push(*e);
-            }
-        }
+        Some(v) => for e in v {
+            result.push(*e);
+        },
         None => (),
     }
     for op in operands {
...
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-use rocksdb::{DB, DBOptions, Range, Writable};
+use rocksdb::{DBOptions, Range, Writable, DB};
 use tempdir::TempDir;
@@ -21,11 +21,13 @@ fn test_compact_range() {
     let mut opts = DBOptions::new();
     opts.create_if_missing(true);
     let db = DB::open(opts, path.path().to_str().unwrap()).unwrap();
-    let samples = vec![(b"k1".to_vec(), b"value--------1".to_vec()),
-                       (b"k2".to_vec(), b"value--------2".to_vec()),
-                       (b"k3".to_vec(), b"value--------3".to_vec()),
-                       (b"k4".to_vec(), b"value--------4".to_vec()),
-                       (b"k5".to_vec(), b"value--------5".to_vec())];
+    let samples = vec![
+        (b"k1".to_vec(), b"value--------1".to_vec()),
+        (b"k2".to_vec(), b"value--------2".to_vec()),
+        (b"k3".to_vec(), b"value--------3".to_vec()),
+        (b"k4".to_vec(), b"value--------4".to_vec()),
+        (b"k5".to_vec(), b"value--------5".to_vec()),
+    ];
     for &(ref k, ref v) in &samples {
         db.put(k, v).unwrap();
         assert_eq!(v.as_slice(), &*db.get(k).unwrap().unwrap());
...
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-use rocksdb::{Writable, DB, CompactionFilter, DBOptions, ColumnFamilyOptions};
+use rocksdb::{ColumnFamilyOptions, CompactionFilter, DBOptions, Writable, DB};
 use std::sync::{Arc, RwLock};
 use std::sync::atomic::{AtomicBool, Ordering};
 use tempdir::TempDir;
@@ -44,22 +44,28 @@ fn test_compaction_filter() {
     let drop_called = Arc::new(AtomicBool::new(false));
     let filtered_kvs = Arc::new(RwLock::new(vec![]));
     // set ignore_snapshots to false
-    cf_opts.set_compaction_filter("test",
-                                  false,
-                                  Box::new(Filter {
-                                      drop_called: drop_called.clone(),
-                                      filtered_kvs: filtered_kvs.clone(),
-                                  }))
+    cf_opts
+        .set_compaction_filter(
+            "test",
+            false,
+            Box::new(Filter {
+                drop_called: drop_called.clone(),
+                filtered_kvs: filtered_kvs.clone(),
+            }),
+        )
         .unwrap();
     let mut opts = DBOptions::new();
     opts.create_if_missing(true);
-    let db = DB::open_cf(opts,
-                         path.path().to_str().unwrap(),
-                         vec!["default"],
-                         vec![cf_opts])
-        .unwrap();
-    let samples = vec![(b"key1".to_vec(), b"value1".to_vec()),
-                       (b"key2".to_vec(), b"value2".to_vec())];
+    let db = DB::open_cf(
+        opts,
+        path.path().to_str().unwrap(),
+        vec!["default"],
+        vec![cf_opts],
+    ).unwrap();
+    let samples = vec![
+        (b"key1".to_vec(), b"value1".to_vec()),
+        (b"key2".to_vec(), b"value2".to_vec()),
+    ];
     for &(ref k, ref v) in &samples {
         db.put(k, v).unwrap();
         assert_eq!(v.as_slice(), &*db.get(k).unwrap().unwrap());
@@ -79,21 +85,25 @@ fn test_compaction_filter() {
     // reregister with ignore_snapshots set to true
     let mut cf_opts = ColumnFamilyOptions::new();
     let opts = DBOptions::new();
-    cf_opts.set_compaction_filter("test",
-                                  true,
-                                  Box::new(Filter {
-                                      drop_called: drop_called.clone(),
-                                      filtered_kvs: filtered_kvs.clone(),
-                                  }))
+    cf_opts
+        .set_compaction_filter(
+            "test",
+            true,
+            Box::new(Filter {
+                drop_called: drop_called.clone(),
+                filtered_kvs: filtered_kvs.clone(),
+            }),
+        )
         .unwrap();
     assert!(drop_called.load(Ordering::Relaxed));
     drop_called.store(false, Ordering::Relaxed);
     {
-        let db = DB::open_cf(opts,
-                             path.path().to_str().unwrap(),
-                             vec!["default"],
-                             vec![cf_opts])
-            .unwrap();
+        let db = DB::open_cf(
+            opts,
+            path.path().to_str().unwrap(),
+            vec!["default"],
+            vec![cf_opts],
+        ).unwrap();
         let _snap = db.snapshot();
         // Because ignore_snapshots is true, so all the keys will be compacted.
         db.compact_range(Some(b"key1"), Some(b"key3"));
...
This diff is collapsed.
@@ -86,17 +86,19 @@ fn test_event_listener_basic() {
     opts.create_if_missing(true);
     let db = DB::open(opts, path_str).unwrap();
     for i in 1..8000 {
-        db.put(format!("{:04}", i).as_bytes(),
-                format!("{:04}", i).as_bytes())
-            .unwrap();
+        db.put(
+            format!("{:04}", i).as_bytes(),
+            format!("{:04}", i).as_bytes(),
+        ).unwrap();
     }
     db.flush(true).unwrap();
     assert_ne!(counter.flush.load(Ordering::SeqCst), 0);
     for i in 1..8000 {
-        db.put(format!("{:04}", i).as_bytes(),
-                format!("{:04}", i).as_bytes())
-            .unwrap();
+        db.put(
+            format!("{:04}", i).as_bytes(),
+            format!("{:04}", i).as_bytes(),
+        ).unwrap();
     }
     db.flush(true).unwrap();
     let flush_cnt = counter.flush.load(Ordering::SeqCst);
@@ -125,10 +127,12 @@ fn test_event_listener_ingestion() {
     let test_sstfile_str = test_sstfile.to_str().unwrap();

     let default_options = db.get_options();
-    gen_sst(default_options,
-            Some(db.cf_handle("default").unwrap()),
-            test_sstfile_str,
-            &[(b"k1", b"v1"), (b"k2", b"v2")]);
+    gen_sst(
+        default_options,
+        Some(db.cf_handle("default").unwrap()),
+        test_sstfile_str,
+        &[(b"k1", b"v1"), (b"k2", b"v2")],
+    );
     let ingest_opt = IngestExternalFileOptions::new();
     db.ingest_external_file(&ingest_opt, &[test_sstfile_str])
...
@@ -16,10 +16,12 @@ use rocksdb::*;
 use std::fs;
 use tempdir::TempDir;

-pub fn gen_sst(opt: ColumnFamilyOptions,
-               cf: Option<&CFHandle>,
-               path: &str,
-               data: &[(&[u8], &[u8])]) {
+pub fn gen_sst(
+    opt: ColumnFamilyOptions,
+    cf: Option<&CFHandle>,
+    path: &str,
+    data: &[(&[u8], &[u8])],
+) {
     let _ = fs::remove_file(path);
     let env_opt = EnvOptions::new();
     let mut writer = if cf.is_some() {
@@ -79,11 +81,9 @@ fn gen_sst_delete(opt: ColumnFamilyOptions, cf: Option<&CFHandle>, path: &str) {
 fn concat_merge(_: &[u8], existing_val: Option<&[u8]>, operands: &mut MergeOperands) -> Vec<u8> {
     let mut result: Vec<u8> = Vec::with_capacity(operands.size_hint().0);
     match existing_val {
-        Some(v) => {
-            for e in v {
-                result.push(*e)
-            }
-        }
+        Some(v) => for e in v {
+            result.push(*e)
+        },
         None => (),
     }
     for op in operands {
@@ -106,10 +106,12 @@ fn test_ingest_external_file() {
     let test_sstfile_str = test_sstfile.to_str().unwrap();

     let default_options = db.get_options();
-    gen_sst(default_options,
-            Some(db.cf_handle("default").unwrap()),
-            test_sstfile_str,
-            &[(b"k1", b"v1"), (b"k2", b"v2")]);
+    gen_sst(
+        default_options,
+        Some(db.cf_handle("default").unwrap()),
+        test_sstfile_str,
+        &[(b"k1", b"v1"), (b"k2", b"v2")],
+    );
     let mut ingest_opt = IngestExternalFileOptions::new();
     db.ingest_external_file(&ingest_opt, &[test_sstfile_str])
         .unwrap();
@@ -117,20 +119,24 @@ fn test_ingest_external_file() {
     assert_eq!(db.get(b"k1").unwrap().unwrap(), b"v1");
     assert_eq!(db.get(b"k2").unwrap().unwrap(), b"v2");

-    gen_sst(ColumnFamilyOptions::new(),
-            None,
-            test_sstfile_str,
-            &[(b"k1", b"v3"), (b"k2", b"v4")]);
+    gen_sst(
+        ColumnFamilyOptions::new(),
+        None,
+        test_sstfile_str,
+        &[(b"k1", b"v3"), (b"k2", b"v4")],
+    );
     db.ingest_external_file_cf(handle, &ingest_opt, &[test_sstfile_str])
         .unwrap();
     assert_eq!(db.get_cf(handle, b"k1").unwrap().unwrap(), b"v3");
     assert_eq!(db.get_cf(handle, b"k2").unwrap().unwrap(), b"v4");

     let snap = db.snapshot();

-    gen_sst(ColumnFamilyOptions::new(),
-            None,
-            test_sstfile_str,
-            &[(b"k2", b"v5"), (b"k3", b"v6")]);
+    gen_sst(
+        ColumnFamilyOptions::new(),
+        None,
+        test_sstfile_str,
+        &[(b"k2", b"v5"), (b"k3", b"v6")],
+    );
     ingest_opt.move_files(true);
     db.ingest_external_file_cf(handle, &ingest_opt, &[test_sstfile_str])
         .unwrap();
@@ -157,9 +163,11 @@ fn test_ingest_external_file_new() {
     let test_sstfile_str = test_sstfile.to_str().unwrap();
     let default_options = db.get_options();

-    gen_sst_put(default_options,
-                Some(db.cf_handle("default").unwrap()),
-                test_sstfile_str);
+    gen_sst_put(
+        default_options,
+        Some(db.cf_handle("default").unwrap()),
+        test_sstfile_str,
+    );
     let mut ingest_opt = IngestExternalFileOptions::new();
     db.ingest_external_file(&ingest_opt, &[test_sstfile_str])
         .unwrap();
@@ -171,9 +179,11 @@ fn test_ingest_external_file_new() {
     let snap = db.snapshot();
     let default_options = db.get_options();

-    gen_sst_merge(default_options,
-                  Some(db.cf_handle("default").unwrap()),
-                  test_sstfile_str);
+    gen_sst_merge(
+        default_options,
+        Some(db.cf_handle("default").unwrap()),
+        test_sstfile_str,
+    );
     db.ingest_external_file(&ingest_opt, &[test_sstfile_str])
         .unwrap();
@@ -182,9 +192,11 @@ fn test_ingest_external_file_new() {
     assert_eq!(db.get(b"k3").unwrap().unwrap(), b"cd");

     let default_options = db.get_options();
-    gen_sst_delete(default_options,
-                   Some(db.cf_handle("default").unwrap()),
-                   test_sstfile_str);
+    gen_sst_delete(
+        default_options,
+        Some(db.cf_handle("default").unwrap()),
+        test_sstfile_str,
+    );
     ingest_opt.move_files(true);
     db.ingest_external_file(&ingest_opt, &[test_sstfile_str])
         .unwrap();
@@ -300,10 +312,12 @@ fn test_ingest_simulate_real_world() {
     for cf in &ALL_CFS {
         let handle = db.cf_handle(cf).unwrap();
         let cf_opts = ColumnFamilyOptions::new();
-        put_delete_and_generate_sst_cf(cf_opts,
-                                       &db,
-                                       &handle,
-                                       gen_path.path().join(cf).to_str().unwrap());
+        put_delete_and_generate_sst_cf(
+            cf_opts,
+            &db,
+            &handle,
+            gen_path.path().join(cf).to_str().unwrap(),
+        );
     }

     let path2 = TempDir::new("_rust_rocksdb_ingest_real_world_2").expect("");
@@ -318,29 +332,47 @@ fn test_ingest_simulate_real_world() {
         let handle = db2.cf_handle(cf).unwrap();
         let mut ingest_opt = IngestExternalFileOptions::new();
         ingest_opt.move_files(true);
-        db2.ingest_external_file_cf(handle,
-                                    &ingest_opt,
-                                    &[gen_path.path().join(cf).to_str().unwrap()])
-            .unwrap();
-        check_kv(&db,
-                 db.cf_handle(cf),
-                 &[(b"k1", None), (b"k2", Some(b"v2")), (b"k3", None), (b"k4", Some(b"v4"))]);
+        db2.ingest_external_file_cf(
+            handle,
+            &ingest_opt,
+            &[gen_path.path().join(cf).to_str().unwrap()],
+        ).unwrap();
+        check_kv(
+            &db,
+            db.cf_handle(cf),
+            &[
+                (b"k1", None),
+                (b"k2", Some(b"v2")),
+                (b"k3", None),
+                (b"k4", Some(b"v4")),
+            ],
+        );
         let cf_opts = ColumnFamilyOptions::new();
-        gen_sst_from_cf(cf_opts,
-                        &db2,
-                        &handle,
-                        gen_path.path().join(cf).to_str().unwrap());
+        gen_sst_from_cf(
+            cf_opts,
+            &db2,
+            &handle,
+            gen_path.path().join(cf).to_str().unwrap(),
+        );
     }

     for cf in &ALL_CFS {
         let handle = db.cf_handle(cf).unwrap();
         let ingest_opt = IngestExternalFileOptions::new();
-        db.ingest_external_file_cf(handle,
-                                   &ingest_opt,
-                                   &[gen_path.path().join(cf).to_str().unwrap()])
-            .unwrap();
-        check_kv(&db,
-                 db.cf_handle(cf),
-                 &[(b"k1", None), (b"k2", Some(b"v2")), (b"k3", None), (b"k4", Some(b"v4"))]);
+        db.ingest_external_file_cf(
+            handle,
+            &ingest_opt,
+            &[gen_path.path().join(cf).to_str().unwrap()],
+        ).unwrap();
+        check_kv(
+            &db,
+            db.cf_handle(cf),
+            &[
+                (b"k1", None),
+                (b"k2", Some(b"v2")),
+                (b"k3", None),
+                (b"k4", Some(b"v4")),
+            ],
+        );
     }
 }
...@@ -79,8 +79,11 @@ pub fn test_iterator() { ...@@ -79,8 +79,11 @@ pub fn test_iterator() {
assert!(p.is_ok()); assert!(p.is_ok());
let p = db.put(k3, v3); let p = db.put(k3, v3);
assert!(p.is_ok()); assert!(p.is_ok());
let expected = let expected = vec![
vec![(k1.to_vec(), v1.to_vec()), (k2.to_vec(), v2.to_vec()), (k3.to_vec(), v3.to_vec())]; (k1.to_vec(), v1.to_vec()),
(k2.to_vec(), v2.to_vec()),
(k3.to_vec(), v3.to_vec()),
];
let mut iter = db.iter(); let mut iter = db.iter();
...@@ -113,10 +116,12 @@ pub fn test_iterator() { ...@@ -113,10 +116,12 @@ pub fn test_iterator() {
old_iterator.seek(SeekKey::Start); old_iterator.seek(SeekKey::Start);
let p = db.put(&*k4, &*v4); let p = db.put(&*k4, &*v4);
assert!(p.is_ok()); assert!(p.is_ok());
let expected2 = vec![(k1.to_vec(), v1.to_vec()), let expected2 = vec![
(k1.to_vec(), v1.to_vec()),
(k2.to_vec(), v2.to_vec()), (k2.to_vec(), v2.to_vec()),
(k3.to_vec(), v3.to_vec()), (k3.to_vec(), v3.to_vec()),
(k4.to_vec(), v4.to_vec())]; (k4.to_vec(), v4.to_vec()),
];
assert_eq!(old_iterator.collect::<Vec<_>>(), expected); assert_eq!(old_iterator.collect::<Vec<_>>(), expected);
iter = db.iter(); iter = db.iter();
...@@ -124,8 +129,11 @@ pub fn test_iterator() { ...@@ -124,8 +129,11 @@ pub fn test_iterator() {
assert_eq!(iter.collect::<Vec<_>>(), expected2); assert_eq!(iter.collect::<Vec<_>>(), expected2);
iter.seek(SeekKey::Key(k2)); iter.seek(SeekKey::Key(k2));
let expected = let expected = vec![
vec![(k2.to_vec(), v2.to_vec()), (k3.to_vec(), v3.to_vec()), (k4.to_vec(), v4.to_vec())]; (k2.to_vec(), v2.to_vec()),
(k3.to_vec(), v3.to_vec()),
(k4.to_vec(), v4.to_vec()),
];
assert_eq!(iter.collect::<Vec<_>>(), expected); assert_eq!(iter.collect::<Vec<_>>(), expected);
iter.seek(SeekKey::Key(k2)); iter.seek(SeekKey::Key(k2));
...@@ -241,19 +249,32 @@ fn test_total_order_seek() { ...@@ -241,19 +249,32 @@ fn test_total_order_seek() {
let mut opts = DBOptions::new(); let mut opts = DBOptions::new();
opts.create_if_missing(true); opts.create_if_missing(true);
cf_opts.set_block_based_table_factory(&bbto); cf_opts.set_block_based_table_factory(&bbto);
cf_opts.set_prefix_extractor("FixedPrefixTransform", cf_opts
Box::new(FixedPrefixTransform { prefix_len: 2 })) .set_prefix_extractor(
"FixedPrefixTransform",
Box::new(FixedPrefixTransform { prefix_len: 2 }),
)
.unwrap(); .unwrap();
// also create prefix bloom for memtable // also create prefix bloom for memtable
cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64); cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64);
let keys = vec![b"k1-1", b"k1-2", b"k1-3", b"k2-1", b"k2-2", b"k2-3", b"k3-1", b"k3-2", let keys = vec![
b"k3-3"]; b"k1-1",
let db = DB::open_cf(opts, b"k1-2",
b"k1-3",
b"k2-1",
b"k2-2",
b"k2-3",
b"k3-1",
b"k3-2",
b"k3-3",
];
let db = DB::open_cf(
opts,
path.path().to_str().unwrap(), path.path().to_str().unwrap(),
vec!["default"], vec!["default"],
vec![cf_opts]) vec![cf_opts],
.unwrap(); ).unwrap();
let wopts = WriteOptions::new(); let wopts = WriteOptions::new();
// sst1 // sst1
...@@ -324,15 +345,19 @@ fn test_fixed_suffix_seek() { ...@@ -324,15 +345,19 @@ fn test_fixed_suffix_seek() {
let mut cf_opts = ColumnFamilyOptions::new(); let mut cf_opts = ColumnFamilyOptions::new();
opts.create_if_missing(true); opts.create_if_missing(true);
cf_opts.set_block_based_table_factory(&bbto); cf_opts.set_block_based_table_factory(&bbto);
cf_opts.set_prefix_extractor("FixedSuffixTransform", cf_opts
Box::new(FixedSuffixTransform { suffix_len: 2 })) .set_prefix_extractor(
"FixedSuffixTransform",
Box::new(FixedSuffixTransform { suffix_len: 2 }),
)
.unwrap(); .unwrap();
let db = DB::open_cf(opts, let db = DB::open_cf(
opts,
path.path().to_str().unwrap(), path.path().to_str().unwrap(),
vec!["default"], vec!["default"],
vec![cf_opts]) vec![cf_opts],
.unwrap(); ).unwrap();
db.put(b"k-eghe-5", b"a").unwrap(); db.put(b"k-eghe-5", b"a").unwrap();
db.put(b"k-24yfae-6", b"a").unwrap(); db.put(b"k-24yfae-6", b"a").unwrap();
db.put(b"k-h1fwd-7", b"a").unwrap(); db.put(b"k-h1fwd-7", b"a").unwrap();
......
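
Both seek tests above configure a prefix (or suffix) extractor plus a memtable prefix bloom before opening the DB, and the reflowed `DB::open_cf` shape recurs throughout this commit. A minimal sketch of that setup, using only setters that appear in this diff (the extractor itself is omitted, since its definition lives outside these hunks):

    use rocksdb::{BlockBasedOptions, ColumnFamilyOptions, DBOptions, DB};

    fn open_with_prefix_bloom(path: &str) -> DB {
        let mut opts = DBOptions::new();
        opts.create_if_missing(true);
        let bbto = BlockBasedOptions::new();
        let mut cf_opts = ColumnFamilyOptions::new();
        cf_opts.set_block_based_table_factory(&bbto);
        // Reserve 10% of the memtable for prefix bloom bits, as in the
        // tests above.
        cf_opts.set_memtable_prefix_bloom_size_ratio(0.1);
        DB::open_cf(opts, path, vec!["default"], vec![cf_opts]).unwrap()
    }
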
...@@ -11,7 +11,7 @@ ...@@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
use rocksdb::{DB, Writable}; use rocksdb::{Writable, DB};
use std::sync::Arc; use std::sync::Arc;
use std::thread; use std::thread;
use tempdir::TempDir; use tempdir::TempDir;
...@@ -40,11 +40,9 @@ pub fn test_multithreaded() { ...@@ -40,11 +40,9 @@ pub fn test_multithreaded() {
let db3 = db.clone(); let db3 = db.clone();
let j3 = thread::spawn(move || for _ in 1..N { let j3 = thread::spawn(move || for _ in 1..N {
match db3.get(b"key") { match db3.get(b"key") {
Ok(Some(v)) => { Ok(Some(v)) => if &v[..] != b"value1" && &v[..] != b"value2" {
if &v[..] != b"value1" && &v[..] != b"value2" {
assert!(false); assert!(false);
} },
}
_ => { _ => {
assert!(false); assert!(false);
} }
......
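
The multithreaded hunk collapses a nested `if` into rustfmt's match-arm style; behavior is unchanged. For context, a sketch of the sharing pattern the test depends on: the `DB` handle is cloned behind an `Arc` and used from several threads (the key and values here are placeholders):

    use rocksdb::{Writable, DB};
    use std::sync::Arc;
    use std::thread;

    fn concurrent_put_get(db: Arc<DB>) {
        let db2 = db.clone();
        let writer = thread::spawn(move || for i in 0..100 {
            db2.put(b"key", format!("value{}", i).as_bytes()).unwrap();
        });
        let reader = thread::spawn(move || for _ in 0..100 {
            // Readers only ever observe a fully written value or none.
            let _ = db.get(b"key").unwrap();
        });
        writer.join().unwrap();
        reader.join().unwrap();
    }
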
...@@ -31,8 +31,17 @@ impl SliceTransform for FixedPrefixTransform { ...@@ -31,8 +31,17 @@ impl SliceTransform for FixedPrefixTransform {
#[test] #[test]
fn test_prefix_extractor_compatibility() { fn test_prefix_extractor_compatibility() {
let path = TempDir::new("_rust_rocksdb_prefix_extractor_compatibility").expect(""); let path = TempDir::new("_rust_rocksdb_prefix_extractor_compatibility").expect("");
let keys = vec![b"k1-0", b"k1-1", b"k1-2", b"k1-3", b"k1-4", b"k1-5", b"k1-6", b"k1-7", let keys = vec![
b"k1-8"]; b"k1-0",
b"k1-1",
b"k1-2",
b"k1-3",
b"k1-4",
b"k1-5",
b"k1-6",
b"k1-7",
b"k1-8",
];
// create db with no prefix extractor, and insert data // create db with no prefix extractor, and insert data
{ {
...@@ -57,16 +66,20 @@ fn test_prefix_extractor_compatibility() { ...@@ -57,16 +66,20 @@ fn test_prefix_extractor_compatibility() {
let mut cf_opts = ColumnFamilyOptions::new(); let mut cf_opts = ColumnFamilyOptions::new();
opts.create_if_missing(false); opts.create_if_missing(false);
cf_opts.set_block_based_table_factory(&bbto); cf_opts.set_block_based_table_factory(&bbto);
cf_opts.set_prefix_extractor("FixedPrefixTransform", cf_opts
Box::new(FixedPrefixTransform { prefix_len: 2 })) .set_prefix_extractor(
"FixedPrefixTransform",
Box::new(FixedPrefixTransform { prefix_len: 2 }),
)
.unwrap(); .unwrap();
// also create prefix bloom for memtable // also create prefix bloom for memtable
cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64); cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64);
let db = DB::open_cf(opts, let db = DB::open_cf(
opts,
path.path().to_str().unwrap(), path.path().to_str().unwrap(),
vec!["default"], vec!["default"],
vec![cf_opts]) vec![cf_opts],
.unwrap(); ).unwrap();
let wopts = WriteOptions::new(); let wopts = WriteOptions::new();
// sst2 with prefix bloom. // sst2 with prefix bloom.
......
...@@ -11,11 +11,11 @@ ...@@ -11,11 +11,11 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
use rocksdb::{DB, ColumnFamilyOptions, DBOptions, BlockBasedOptions, WriteOptions, ReadOptions, use rocksdb::{BlockBasedOptions, ColumnFamilyOptions, CompactOptions, DBOptions, ReadOptions,
SliceTransform, Writable, CompactOptions, SeekKey}; SeekKey, SliceTransform, Writable, WriteOptions, DB};
use rocksdb::crocksdb_ffi::{DBStatisticsHistogramType as HistogramType, use rocksdb::crocksdb_ffi::{CompactionPriority, DBCompressionType, DBInfoLogLevel as InfoLogLevel,
DBStatisticsTickerType as TickerType, DBInfoLogLevel as InfoLogLevel, DBStatisticsHistogramType as HistogramType,
CompactionPriority, DBCompressionType}; DBStatisticsTickerType as TickerType};
use std::path::Path; use std::path::Path;
use std::thread; use std::thread;
use std::time::Duration; use std::time::Duration;
...@@ -29,11 +29,12 @@ fn test_set_num_levels() { ...@@ -29,11 +29,12 @@ fn test_set_num_levels() {
let mut cf_opts = ColumnFamilyOptions::new(); let mut cf_opts = ColumnFamilyOptions::new();
opts.create_if_missing(true); opts.create_if_missing(true);
cf_opts.set_num_levels(2); cf_opts.set_num_levels(2);
let db = DB::open_cf(opts, let db = DB::open_cf(
opts,
path.path().to_str().unwrap(), path.path().to_str().unwrap(),
vec!["default"], vec!["default"],
vec![cf_opts]) vec![cf_opts],
.unwrap(); ).unwrap();
drop(db); drop(db);
} }
...@@ -71,14 +72,22 @@ fn test_enable_statistics() { ...@@ -71,14 +72,22 @@ fn test_enable_statistics() {
opts.enable_statistics(); opts.enable_statistics();
opts.set_stats_dump_period_sec(60); opts.set_stats_dump_period_sec(60);
assert!(opts.get_statistics().is_some()); assert!(opts.get_statistics().is_some());
assert!(opts.get_statistics_histogram_string(HistogramType::SeekMicros) assert!(
.is_some()); opts.get_statistics_histogram_string(HistogramType::SeekMicros)
assert_eq!(opts.get_statistics_ticker_count(TickerType::BlockCacheMiss), .is_some()
0); );
assert_eq!(opts.get_and_reset_statistics_ticker_count(TickerType::BlockCacheMiss), assert_eq!(
0); opts.get_statistics_ticker_count(TickerType::BlockCacheMiss),
assert_eq!(opts.get_statistics_ticker_count(TickerType::BlockCacheMiss), 0
0); );
assert_eq!(
opts.get_and_reset_statistics_ticker_count(TickerType::BlockCacheMiss),
0
);
assert_eq!(
opts.get_statistics_ticker_count(TickerType::BlockCacheMiss),
0
);
let opts = DBOptions::new(); let opts = DBOptions::new();
assert!(opts.get_statistics().is_none()); assert!(opts.get_statistics().is_none());
...@@ -104,16 +113,18 @@ fn test_memtable_insert_hint_prefix_extractor() { ...@@ -104,16 +113,18 @@ fn test_memtable_insert_hint_prefix_extractor() {
let mut opts = DBOptions::new(); let mut opts = DBOptions::new();
let mut cf_opts = ColumnFamilyOptions::new(); let mut cf_opts = ColumnFamilyOptions::new();
opts.create_if_missing(true); opts.create_if_missing(true);
cf_opts.set_memtable_insert_hint_prefix_extractor("FixedPrefixTransform", cf_opts
Box::new(FixedPrefixTransform { .set_memtable_insert_hint_prefix_extractor(
prefix_len: 2, "FixedPrefixTransform",
})) Box::new(FixedPrefixTransform { prefix_len: 2 }),
)
.unwrap(); .unwrap();
let db = DB::open_cf(opts, let db = DB::open_cf(
opts,
path.path().to_str().unwrap(), path.path().to_str().unwrap(),
vec!["default"], vec!["default"],
vec![cf_opts]) vec![cf_opts],
.unwrap(); ).unwrap();
let wopts = WriteOptions::new(); let wopts = WriteOptions::new();
db.put_opt(b"k0-1", b"a", &wopts).unwrap(); db.put_opt(b"k0-1", b"a", &wopts).unwrap();
...@@ -229,11 +240,12 @@ fn test_set_pin_l0_filter_and_index_blocks_in_cache() { ...@@ -229,11 +240,12 @@ fn test_set_pin_l0_filter_and_index_blocks_in_cache() {
let mut block_opts = BlockBasedOptions::new(); let mut block_opts = BlockBasedOptions::new();
block_opts.set_pin_l0_filter_and_index_blocks_in_cache(true); block_opts.set_pin_l0_filter_and_index_blocks_in_cache(true);
cf_opts.set_block_based_table_factory(&block_opts); cf_opts.set_block_based_table_factory(&block_opts);
DB::open_cf(opts, DB::open_cf(
opts,
path.path().to_str().unwrap(), path.path().to_str().unwrap(),
vec!["default"], vec!["default"],
vec![cf_opts]) vec![cf_opts],
.unwrap(); ).unwrap();
} }
#[test] #[test]
fn test_pending_compaction_bytes_limit() { fn test_pending_compaction_bytes_limit() {
...@@ -243,11 +255,12 @@ fn test_pending_compaction_bytes_limit() { ...@@ -243,11 +255,12 @@ fn test_pending_compaction_bytes_limit() {
opts.create_if_missing(true); opts.create_if_missing(true);
cf_opts.set_soft_pending_compaction_bytes_limit(64 * 1024 * 1024 * 1024); cf_opts.set_soft_pending_compaction_bytes_limit(64 * 1024 * 1024 * 1024);
cf_opts.set_hard_pending_compaction_bytes_limit(256 * 1024 * 1024 * 1024); cf_opts.set_hard_pending_compaction_bytes_limit(256 * 1024 * 1024 * 1024);
DB::open_cf(opts, DB::open_cf(
opts,
path.path().to_str().unwrap(), path.path().to_str().unwrap(),
vec!["default"], vec!["default"],
vec![cf_opts]) vec![cf_opts],
.unwrap(); ).unwrap();
} }
#[test] #[test]
...@@ -276,11 +289,12 @@ fn test_set_optimize_filters_for_hits() { ...@@ -276,11 +289,12 @@ fn test_set_optimize_filters_for_hits() {
let mut cf_opts = ColumnFamilyOptions::new(); let mut cf_opts = ColumnFamilyOptions::new();
opts.create_if_missing(true); opts.create_if_missing(true);
cf_opts.set_optimize_filters_for_hits(true); cf_opts.set_optimize_filters_for_hits(true);
DB::open_cf(opts, DB::open_cf(
opts,
path.path().to_str().unwrap(), path.path().to_str().unwrap(),
vec!["default"], vec!["default"],
vec![cf_opts]) vec![cf_opts],
.unwrap(); ).unwrap();
} }
#[test] #[test]
...@@ -295,11 +309,12 @@ fn test_get_block_cache_usage() { ...@@ -295,11 +309,12 @@ fn test_get_block_cache_usage() {
let mut block_opts = BlockBasedOptions::new(); let mut block_opts = BlockBasedOptions::new();
block_opts.set_lru_cache(16 * 1024 * 1024); block_opts.set_lru_cache(16 * 1024 * 1024);
cf_opts.set_block_based_table_factory(&block_opts); cf_opts.set_block_based_table_factory(&block_opts);
let db = DB::open_cf(opts, let db = DB::open_cf(
opts,
path.path().to_str().unwrap(), path.path().to_str().unwrap(),
vec!["default"], vec!["default"],
vec![cf_opts]) vec![cf_opts],
.unwrap(); ).unwrap();
for i in 0..200 { for i in 0..200 {
db.put(format!("k_{}", i).as_bytes(), b"v").unwrap(); db.put(format!("k_{}", i).as_bytes(), b"v").unwrap();
...@@ -319,11 +334,12 @@ fn test_set_level_compaction_dynamic_level_bytes() { ...@@ -319,11 +334,12 @@ fn test_set_level_compaction_dynamic_level_bytes() {
let mut cf_opts = ColumnFamilyOptions::new(); let mut cf_opts = ColumnFamilyOptions::new();
opts.create_if_missing(true); opts.create_if_missing(true);
cf_opts.set_level_compaction_dynamic_level_bytes(true); cf_opts.set_level_compaction_dynamic_level_bytes(true);
DB::open_cf(opts, DB::open_cf(
opts,
path.path().to_str().unwrap(), path.path().to_str().unwrap(),
vec!["default"], vec!["default"],
vec![cf_opts]) vec![cf_opts],
.unwrap(); ).unwrap();
} }
#[test] #[test]
...@@ -375,11 +391,12 @@ fn test_set_compaction_pri() { ...@@ -375,11 +391,12 @@ fn test_set_compaction_pri() {
let mut cf_opts = ColumnFamilyOptions::new(); let mut cf_opts = ColumnFamilyOptions::new();
opts.create_if_missing(true); opts.create_if_missing(true);
cf_opts.compaction_priority(CompactionPriority::MinOverlappingRatio); cf_opts.compaction_priority(CompactionPriority::MinOverlappingRatio);
DB::open_cf(opts, DB::open_cf(
opts,
path.path().to_str().unwrap(), path.path().to_str().unwrap(),
vec!["default"], vec!["default"],
vec![cf_opts]) vec![cf_opts],
.unwrap(); ).unwrap();
} }
#[test] #[test]
...@@ -438,11 +455,12 @@ fn test_bottommost_compression() { ...@@ -438,11 +455,12 @@ fn test_bottommost_compression() {
let cf_opts = ColumnFamilyOptions::new(); let cf_opts = ColumnFamilyOptions::new();
opts.create_if_missing(true); opts.create_if_missing(true);
cf_opts.bottommost_compression(DBCompressionType::No); cf_opts.bottommost_compression(DBCompressionType::No);
DB::open_cf(opts, DB::open_cf(
opts,
path.path().to_str().unwrap(), path.path().to_str().unwrap(),
vec!["default"], vec!["default"],
vec![cf_opts]) vec![cf_opts],
.unwrap(); ).unwrap();
} }
#[test] #[test]
......
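
Every `DB::open_cf` call in this options test file is reflowed identically, so one consolidated sketch covers them all. The pending-compaction limits are the exact values used above (64 GiB soft, 256 GiB hard):

    use rocksdb::{ColumnFamilyOptions, DBOptions, DB};

    fn open_with_compaction_limits(path: &str) -> DB {
        let mut opts = DBOptions::new();
        opts.create_if_missing(true);
        let mut cf_opts = ColumnFamilyOptions::new();
        // Write slowdown begins at the soft limit; writes stop entirely
        // at the hard limit.
        cf_opts.set_soft_pending_compaction_bytes_limit(64 * 1024 * 1024 * 1024);
        cf_opts.set_hard_pending_compaction_bytes_limit(256 * 1024 * 1024 * 1024);
        DB::open_cf(opts, path, vec!["default"], vec![cf_opts]).unwrap()
    }
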
...@@ -11,8 +11,8 @@ ...@@ -11,8 +11,8 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
use rocksdb::{Writable, DB, SliceTransform, ColumnFamilyOptions, DBOptions, SeekKey, use rocksdb::{BlockBasedOptions, ColumnFamilyOptions, DBOptions, SeekKey, SliceTransform,
BlockBasedOptions}; Writable, DB};
use tempdir::TempDir; use tempdir::TempDir;
struct FixedPostfixTransform { struct FixedPostfixTransform {
...@@ -43,19 +43,23 @@ fn test_slice_transform() { ...@@ -43,19 +43,23 @@ fn test_slice_transform() {
cf_opts.set_block_based_table_factory(&block_opts); cf_opts.set_block_based_table_factory(&block_opts);
cf_opts.set_memtable_prefix_bloom_size_ratio(0.25); cf_opts.set_memtable_prefix_bloom_size_ratio(0.25);
cf_opts.set_prefix_extractor("test", Box::new(FixedPostfixTransform { postfix_len: 2 })) cf_opts
.set_prefix_extractor("test", Box::new(FixedPostfixTransform { postfix_len: 2 }))
.unwrap(); .unwrap();
opts.create_if_missing(true); opts.create_if_missing(true);
let db = DB::open_cf(opts, let db = DB::open_cf(
opts,
path.path().to_str().unwrap(), path.path().to_str().unwrap(),
vec!["default"], vec!["default"],
vec![cf_opts]) vec![cf_opts],
.unwrap(); ).unwrap();
let samples = vec![(b"key_01".to_vec(), b"1".to_vec()), let samples = vec![
(b"key_01".to_vec(), b"1".to_vec()),
(b"key_02".to_vec(), b"2".to_vec()), (b"key_02".to_vec(), b"2".to_vec()),
(b"key_0303".to_vec(), b"3".to_vec()), (b"key_0303".to_vec(), b"3".to_vec()),
(b"key_0404".to_vec(), b"4".to_vec())]; (b"key_0404".to_vec(), b"4".to_vec()),
];
for &(ref k, ref v) in &samples { for &(ref k, ref v) in &samples {
db.put(k, v).unwrap(); db.put(k, v).unwrap();
...@@ -64,17 +68,23 @@ fn test_slice_transform() { ...@@ -64,17 +68,23 @@ fn test_slice_transform() {
let mut it = db.iter(); let mut it = db.iter();
let invalid_seeks = let invalid_seeks = vec![
vec![b"key_".to_vec(), b"key_0".to_vec(), b"key_030".to_vec(), b"key_03000".to_vec()]; b"key_".to_vec(),
b"key_0".to_vec(),
b"key_030".to_vec(),
b"key_03000".to_vec(),
];
for key in invalid_seeks { for key in invalid_seeks {
it.seek(SeekKey::Key(&key)); it.seek(SeekKey::Key(&key));
assert!(!it.valid()); assert!(!it.valid());
} }
let valid_seeks = vec![(b"key_00".to_vec(), b"key_01".to_vec()), let valid_seeks = vec![
(b"key_00".to_vec(), b"key_01".to_vec()),
(b"key_03".to_vec(), b"key_0303".to_vec()), (b"key_03".to_vec(), b"key_0303".to_vec()),
(b"key_0301".to_vec(), b"key_0303".to_vec())]; (b"key_0301".to_vec(), b"key_0303".to_vec()),
];
for (key, expect_key) in valid_seeks { for (key, expect_key) in valid_seeks {
it.seek(SeekKey::Key(&key)); it.seek(SeekKey::Key(&key));
......
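
The slice-transform hunks are likewise reflow-only. The `FixedPostfixTransform` used here is defined earlier in the file; its core is just slicing the last `postfix_len` bytes off a key. A sketch of that assumed logic as standalone functions (the exact `SliceTransform` trait method set is not shown in this diff, so the trait impl is deliberately omitted):

    // Hypothetical standalone versions of the transform logic assumed
    // to back FixedPostfixTransform above: the "prefix" handed to
    // RocksDB is the key minus its last postfix_len bytes.
    fn transform(key: &[u8], postfix_len: usize) -> &[u8] {
        &key[..key.len() - postfix_len]
    }

    // Guard against underflow: keys shorter than the postfix fall
    // outside the transform's domain.
    fn in_domain(key: &[u8], postfix_len: usize) -> bool {
        key.len() >= postfix_len
    }
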
...@@ -12,7 +12,7 @@ ...@@ -12,7 +12,7 @@
// limitations under the License. // limitations under the License.
use rocksdb::*; use rocksdb::*;
use rocksdb::{DBStatisticsTickerType as TickerType, DBStatisticsHistogramType as HistogramType}; use rocksdb::{DBStatisticsHistogramType as HistogramType, DBStatisticsTickerType as TickerType};
use tempdir::TempDir; use tempdir::TempDir;
#[test] #[test]
...@@ -35,8 +35,12 @@ fn test_db_statistics() { ...@@ -35,8 +35,12 @@ fn test_db_statistics() {
assert!(db.get_statistics_ticker_count(TickerType::BlockCacheHit) > 0); assert!(db.get_statistics_ticker_count(TickerType::BlockCacheHit) > 0);
assert!(db.get_and_reset_statistics_ticker_count(TickerType::BlockCacheHit) > 0); assert!(db.get_and_reset_statistics_ticker_count(TickerType::BlockCacheHit) > 0);
assert_eq!(db.get_statistics_ticker_count(TickerType::BlockCacheHit), 0); assert_eq!(db.get_statistics_ticker_count(TickerType::BlockCacheHit), 0);
assert!(db.get_statistics_histogram_string(HistogramType::GetMicros) assert!(
.is_some()); db.get_statistics_histogram_string(HistogramType::GetMicros)
assert!(db.get_statistics_histogram(HistogramType::GetMicros) .is_some()
.is_some()); );
assert!(
db.get_statistics_histogram(HistogramType::GetMicros)
.is_some()
);
} }
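
The statistics hunks reflow multi-line assertions; the API itself is a cumulative ticker, a get-and-reset ticker, and histogram accessors. A minimal sketch using only calls appearing in this diff (statistics must have been enabled via `opts.enable_statistics()` before the DB was opened):

    use rocksdb::{DBStatisticsHistogramType as HistogramType,
                  DBStatisticsTickerType as TickerType, DB};

    fn report_block_cache(db: &DB) {
        // Cumulative count; reading does not reset the ticker.
        let hits = db.get_statistics_ticker_count(TickerType::BlockCacheHit);
        // Reads the ticker and zeroes it in a single call.
        let delta = db.get_and_reset_statistics_ticker_count(TickerType::BlockCacheHit);
        println!("hits={}, hits_since_reset={}", hits, delta);
        if let Some(hist) = db.get_statistics_histogram_string(HistogramType::GetMicros) {
            println!("{}", hist);
        }
    }
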
...@@ -12,9 +12,9 @@ ...@@ -12,9 +12,9 @@
// limitations under the License. // limitations under the License.
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use rocksdb::{DB, Range, ColumnFamilyOptions, DBOptions, Writable, DBEntryType, use rocksdb::{ColumnFamilyOptions, DBEntryType, DBOptions, Range, TablePropertiesCollection,
TablePropertiesCollection, TablePropertiesCollector, TablePropertiesCollector, TablePropertiesCollectorFactory, UserCollectedProperties,
TablePropertiesCollectorFactory, UserCollectedProperties}; Writable, DB};
use std::collections::HashMap; use std::collections::HashMap;
use std::fmt; use std::fmt;
use tempdir::TempDir; use tempdir::TempDir;
...@@ -82,7 +82,11 @@ impl ExampleCollector { ...@@ -82,7 +82,11 @@ impl ExampleCollector {
for (k, v) in props { for (k, v) in props {
assert_eq!(v, props.get(k).unwrap()); assert_eq!(v, props.get(k).unwrap());
} }
assert!(props.get(&[Props::NumKeys as u8, Props::NumPuts as u8]).is_none()); assert!(
props
.get(&[Props::NumKeys as u8, Props::NumPuts as u8])
.is_none()
);
assert!(props.len() >= 4); assert!(props.len() >= 4);
c c
...@@ -91,12 +95,14 @@ impl ExampleCollector { ...@@ -91,12 +95,14 @@ impl ExampleCollector {
impl fmt::Display for ExampleCollector { impl fmt::Display for ExampleCollector {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, write!(
f,
"keys={}, puts={}, merges={}, deletes={}", "keys={}, puts={}, merges={}, deletes={}",
self.num_keys, self.num_keys,
self.num_puts, self.num_puts,
self.num_merges, self.num_merges,
self.num_deletes) self.num_deletes
)
} }
} }
...@@ -110,8 +116,7 @@ impl TablePropertiesCollector for ExampleCollector { ...@@ -110,8 +116,7 @@ impl TablePropertiesCollector for ExampleCollector {
match entry_type { match entry_type {
DBEntryType::Put => self.num_puts += 1, DBEntryType::Put => self.num_puts += 1,
DBEntryType::Merge => self.num_merges += 1, DBEntryType::Merge => self.num_merges += 1,
DBEntryType::Delete | DBEntryType::Delete | DBEntryType::SingleDelete => self.num_deletes += 1,
DBEntryType::SingleDelete => self.num_deletes += 1,
DBEntryType::Other => {} DBEntryType::Other => {}
} }
} }
...@@ -135,12 +140,14 @@ impl TablePropertiesCollectorFactory for ExampleFactory { ...@@ -135,12 +140,14 @@ impl TablePropertiesCollectorFactory for ExampleFactory {
} }
} }
fn check_collection(collection: &TablePropertiesCollection, fn check_collection(
collection: &TablePropertiesCollection,
num_files: usize, num_files: usize,
num_keys: u32, num_keys: u32,
num_puts: u32, num_puts: u32,
num_merges: u32, num_merges: u32,
num_deletes: u32) { num_deletes: u32,
) {
let mut res = ExampleCollector::new(); let mut res = ExampleCollector::new();
assert!(!collection.is_empty()); assert!(!collection.is_empty());
let props: HashMap<_, _> = collection.iter().collect(); let props: HashMap<_, _> = collection.iter().collect();
...@@ -166,16 +173,19 @@ fn test_table_properties_collector_factory() { ...@@ -166,16 +173,19 @@ fn test_table_properties_collector_factory() {
cf_opts.add_table_properties_collector_factory("example-collector", Box::new(f)); cf_opts.add_table_properties_collector_factory("example-collector", Box::new(f));
let path = TempDir::new("_rust_rocksdb_collectortest").expect(""); let path = TempDir::new("_rust_rocksdb_collectortest").expect("");
let db = DB::open_cf(opts, let db = DB::open_cf(
opts,
path.path().to_str().unwrap(), path.path().to_str().unwrap(),
vec!["default"], vec!["default"],
vec![cf_opts]) vec![cf_opts],
.unwrap(); ).unwrap();
let samples = vec![(b"key1".to_vec(), b"value1".to_vec()), let samples = vec![
(b"key1".to_vec(), b"value1".to_vec()),
(b"key2".to_vec(), b"value2".to_vec()), (b"key2".to_vec(), b"value2".to_vec()),
(b"key3".to_vec(), b"value3".to_vec()), (b"key3".to_vec(), b"value3".to_vec()),
(b"key4".to_vec(), b"value4".to_vec())]; (b"key4".to_vec(), b"value4".to_vec()),
];
// Put 4 keys. // Put 4 keys.
for &(ref k, ref v) in &samples { for &(ref k, ref v) in &samples {
......
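
The collector hunks are reflow-only as well; note that the two delete variants now share a single match arm. A standalone sketch of that counting logic (a plain struct rather than the full `TablePropertiesCollector` impl, since the trait's other methods fall outside this diff):

    use rocksdb::DBEntryType;

    #[derive(Default)]
    struct EntryCounter {
        num_puts: u32,
        num_merges: u32,
        num_deletes: u32,
    }

    impl EntryCounter {
        // Mirrors ExampleCollector::add above: deletes and single
        // deletes are tallied together.
        fn count(&mut self, entry_type: DBEntryType) {
            match entry_type {
                DBEntryType::Put => self.num_puts += 1,
                DBEntryType::Merge => self.num_merges += 1,
                DBEntryType::Delete | DBEntryType::SingleDelete => self.num_deletes += 1,
                DBEntryType::Other => {}
            }
        }
    }
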
prepare-rustfmt: prepare-rustfmt:
curl -L https://github.com/tennix/rustfmt/releases/download/v0.6/rustfmt-${RUSTFMT_VERSION}-linux-amd64.tar.gz -o rustfmt-${RUSTFMT_VERSION}-linux-amd64.tar.gz && \ @[[ "`cargo fmt -- --version`" = "0.2.1-nightly ( )" ]] || cargo install --vers 0.2.1 --force rustfmt-nightly || exit 0
mkdir -p ${HOME}/.cargo/bin && tar xzf rustfmt-${RUSTFMT_VERSION}-linux-amd64.tar.gz -C ${HOME}/.cargo/bin --strip-components=1 \ No newline at end of file