fangzongwu / rust-rocksdb

Commit 1953d1fa
Authored Oct 01, 2016 by Jay, committed by GitHub on Oct 01, 2016
change width to default 100 (#47)

Parent: 4c1b1655
Showing 15 changed files with 92 additions and 113 deletions (+92, -113). Beyond dropping the width settings from rustfmt.toml, the hunks below reorder imports and reflow code that was previously wrapped at 80 columns to the default 100-column limit.
librocksdb_sys/src/lib.rs       +0   -0
rustfmt.toml                    +0   -2
src/compaction_filter.rs        +8   -10
src/comparator.rs               +5   -8
src/lib.rs                      +4   -6
src/main.rs                     +8   -12
src/merge_operator.rs           +13  -22
src/rocksdb.rs                  +0   -0
src/rocksdb_options.rs          +0   -0
test/test_column_family.rs      +2   -5
test/test_compact_range.rs      +21  -21
test/test_compaction_filter.rs  +17  -11
test/test_iterator.rs           +5   -7
test/test_multithreaded.rs      +1   -1
test/test_rocksdb_options.rs    +8   -8
librocksdb_sys/src/lib.rs (+0 -0): diff collapsed.
rustfmt.toml (+0 -2)

  reorder_imports = true
- max_width = 80
- ideal_width = 80

(The two width settings are the removed lines, so rustfmt's default max_width of 100 applies; reorder_imports is kept.)
src/compaction_filter.rs

@@ -66,20 +66,18 @@ impl Drop for CompactionFilterHandle {
    }
}

pub unsafe fn new_compaction_filter(c_name: CString,
                                    ignore_snapshots: bool,
                                    f: Box<CompactionFilter>)
                                    -> Result<CompactionFilterHandle, String> {
    let proxy = Box::into_raw(Box::new(CompactionFilterProxy {
        name: c_name,
        filter: f,
    }));
    let filter = rocksdb_ffi::rocksdb_compactionfilter_create(proxy as *mut c_void,
                                                              destructor,
                                                              filter,
                                                              name);
    rocksdb_ffi::rocksdb_compactionfilter_set_ignore_snapshots(filter, ignore_snapshots);
    Ok(CompactionFilterHandle { inner: filter })
}
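new_compaction_filter above moves a boxed Rust trait object behind a raw *mut c_void so the C side can hold it, and relies on a destructor callback to give ownership back to Rust exactly once. A minimal, self-contained sketch of that ownership round-trip, using a hypothetical Filter trait and Proxy struct rather than the crate's own types:

use std::os::raw::c_void;
use std::slice;

trait Filter {
    fn keep(&mut self, key: &[u8]) -> bool;
}

struct Proxy {
    inner: Box<dyn Filter>,
}

// Hand ownership to the C side: the returned pointer must eventually be
// passed back to destroy_proxy, or the Proxy leaks.
fn into_c(f: Box<dyn Filter>) -> *mut c_void {
    Box::into_raw(Box::new(Proxy { inner: f })) as *mut c_void
}

// A C-style callback: borrow the proxy through the opaque pointer without
// taking ownership of it.
unsafe extern "C" fn filter_callback(raw: *mut c_void, key: *const u8, key_len: usize) -> bool {
    let proxy: &mut Proxy = &mut *(raw as *mut Proxy);
    proxy.inner.keep(slice::from_raw_parts(key, key_len))
}

// The destructor callback: re-box the pointer so Rust frees it exactly once.
unsafe extern "C" fn destroy_proxy(raw: *mut c_void) {
    drop(Box::from_raw(raw as *mut Proxy));
}

struct DropAll;
impl Filter for DropAll {
    fn keep(&mut self, _key: &[u8]) -> bool {
        false
    }
}

fn main() {
    let raw = into_c(Box::new(DropAll));
    let key = b"k1";
    assert!(!unsafe { filter_callback(raw, key.as_ptr(), key.len()) });
    unsafe { destroy_proxy(raw) };
}

Splitting the hand-over (into_c) from the reclaim (destroy_proxy) is what lets the C library decide when the object dies while Rust still runs the Box's drop glue.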
src/comparator.rs

@@ -12,6 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
//
use libc::{c_char, c_int, c_void, size_t};
use std::ffi::CString;
use std::mem;

@@ -29,8 +30,7 @@ pub extern "C" fn destructor_callback(raw_cb: *mut c_void) {
pub extern "C" fn name_callback(raw_cb: *mut c_void) -> *const c_char {
    unsafe {
        let cb: &mut ComparatorCallback = &mut *(raw_cb as *mut ComparatorCallback);
        let ptr = cb.name.as_ptr();
        ptr as *const c_char
    }

@@ -43,12 +43,9 @@ pub extern "C" fn compare_callback(raw_cb: *mut c_void,
                                   b_len: size_t)
                                   -> c_int {
    unsafe {
        let cb: &mut ComparatorCallback = &mut *(raw_cb as *mut ComparatorCallback);
        let a: &[u8] = slice::from_raw_parts(a_raw as *const u8, a_len as usize);
        let b: &[u8] = slice::from_raw_parts(b_raw as *const u8, b_len as usize);
        (cb.f)(a, b)
    }
}
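compare_callback rebuilds two borrowed byte slices from raw pointer/length pairs and forwards them to the stored closure. A self-contained sketch of the same shape, with a hypothetical ByteComparator standing in for ComparatorCallback, showing how a Rust Ordering maps onto the c_int contract (negative, zero, positive) that a C comparator expects:

use std::cmp::Ordering;
use std::os::raw::{c_int, c_void};
use std::slice;

struct ByteComparator {
    f: Box<dyn Fn(&[u8], &[u8]) -> Ordering>,
}

// Recover the state from the opaque pointer, view the two C buffers as
// slices for the duration of the call, and translate the Ordering.
unsafe extern "C" fn compare_callback(raw_cb: *mut c_void,
                                      a_raw: *const u8,
                                      a_len: usize,
                                      b_raw: *const u8,
                                      b_len: usize)
                                      -> c_int {
    let cb: &mut ByteComparator = &mut *(raw_cb as *mut ByteComparator);
    let a = slice::from_raw_parts(a_raw, a_len);
    let b = slice::from_raw_parts(b_raw, b_len);
    match (cb.f)(a, b) {
        Ordering::Less => -1,
        Ordering::Equal => 0,
        Ordering::Greater => 1,
    }
}

fn main() {
    let mut cmp = ByteComparator { f: Box::new(|a: &[u8], b: &[u8]| a.cmp(b)) };
    let (a, b) = (&b"k1"[..], &b"k2"[..]);
    let r = unsafe {
        compare_callback(&mut cmp as *mut ByteComparator as *mut c_void,
                         a.as_ptr(),
                         a.len(),
                         b.as_ptr(),
                         b.len())
    };
    assert!(r < 0);
}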
src/lib.rs

@@ -27,10 +27,8 @@ pub mod comparator;
mod compaction_filter;

pub use compaction_filter::CompactionFilter;
pub use librocksdb_sys::{DBCompactionStyle, DBCompressionType, DBRecoveryMode, new_bloom_filter,
                         self as rocksdb_ffi};
pub use merge_operator::MergeOperands;
pub use rocksdb::{DB, DBIterator, DBVector, Kv, SeekKey, Writable, WriteBatch, CFHandle, Range};
pub use rocksdb_options::{BlockBasedOptions, Options, ReadOptions, WriteOptions};
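These root-level re-exports are what let downstream code import everything from the crate root. A hedged usage sketch built only from calls that appear elsewhere in this commit (DB::open, Options::new, create_if_missing, put, get, iter, SeekKey::Start); the extern crate declarations and the tempdir dev-dependency are assumed:

extern crate rocksdb;
extern crate tempdir;

use rocksdb::{DB, Options, SeekKey, Writable};
use tempdir::TempDir;

fn main() {
    // All of the names above come straight from the crate-root re-exports.
    let dir = TempDir::new("_rust_rocksdb_reexport_sketch").expect("");
    let mut opts = Options::new();
    opts.create_if_missing(true);
    let db = DB::open(&opts, dir.path().to_str().unwrap()).unwrap();

    db.put(b"k1", b"v1").unwrap();
    assert_eq!(&*db.get(b"k1").unwrap().unwrap(), b"v1");

    let mut iter = db.iter();
    iter.seek(SeekKey::Start);
    assert_eq!(iter.collect::<Vec<_>>(), vec![(b"k1".to_vec(), b"v1".to_vec())]);
}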
src/main.rs

@@ -66,10 +66,7 @@ fn main() {
    custom_merge();
}

fn concat_merge(_: &[u8],
                existing_val: Option<&[u8]>,
                operands: &mut MergeOperands)
                -> Vec<u8> {
    let mut result: Vec<u8> = Vec::with_capacity(operands.size_hint().0);
    match existing_val {
        Some(v) => {

@@ -152,14 +149,13 @@ mod tests {
             opts: &mut Options,
             blockopts: &mut BlockBasedOptions)
             -> DB {
        let per_level_compression: [DBCompressionType; 7] = [DBCompressionType::DBNo,
                                                             DBCompressionType::DBNo,
                                                             DBCompressionType::DBNo,
                                                             DBCompressionType::DBLz4,
                                                             DBCompressionType::DBLz4,
                                                             DBCompressionType::DBLz4,
                                                             DBCompressionType::DBLz4];
        opts.create_if_missing(true);
        opts.set_max_open_files(10000);
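The first hunk above cuts off inside concat_merge's match arm. For context, a hedged sketch of how a concat-style merge function of this shape typically continues; the loop bodies below are an assumption for illustration, not lines from this diff:

use rocksdb::MergeOperands;

fn concat_merge(_new_key: &[u8],
                existing_val: Option<&[u8]>,
                operands: &mut MergeOperands)
                -> Vec<u8> {
    let mut result: Vec<u8> = Vec::with_capacity(operands.size_hint().0);
    // Start from the existing value, if there is one (assumed behaviour).
    if let Some(v) = existing_val {
        result.extend_from_slice(v);
    }
    // Append every pending operand in order; &mut MergeOperands implements
    // Iterator (see src/merge_operator.rs below), yielding byte slices.
    for op in operands {
        result.extend_from_slice(op);
    }
    result
}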
src/merge_operator.rs

@@ -12,6 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
//
use libc::{self, c_char, c_int, c_void, size_t};
use std::ffi::CString;
use std::mem;

@@ -34,8 +35,7 @@ pub extern "C" fn destructor_callback(raw_cb: *mut c_void) {
pub extern "C" fn name_callback(raw_cb: *mut c_void) -> *const c_char {
    unsafe {
        let cb: &mut MergeOperatorCallback = &mut *(raw_cb as *mut MergeOperatorCallback);
        let ptr = cb.name.as_ptr();
        ptr as *const c_char
    }

@@ -53,13 +53,9 @@ pub extern "C" fn full_merge_callback(raw_cb: *mut c_void,
                                      new_value_length: *mut size_t)
                                      -> *const c_char {
    unsafe {
        let cb: &mut MergeOperatorCallback = &mut *(raw_cb as *mut MergeOperatorCallback);
        let operands = &mut MergeOperands::new(operands_list, operands_list_len, num_operands);
        let key: &[u8] = slice::from_raw_parts(raw_key as *const u8, key_len as usize);
        let oldval: &[u8] = slice::from_raw_parts(existing_value as *const u8,
                                                  existing_value_len as usize);
        let mut result = (cb.merge_fn)(key, Some(oldval), operands);

@@ -84,13 +80,9 @@ pub extern "C" fn partial_merge_callback(raw_cb: *mut c_void,
                                         new_value_length: *mut size_t)
                                         -> *const c_char {
    unsafe {
        let cb: &mut MergeOperatorCallback = &mut *(raw_cb as *mut MergeOperatorCallback);
        let operands = &mut MergeOperands::new(operands_list, operands_list_len, num_operands);
        let key: &[u8] = slice::from_raw_parts(raw_key as *const u8, key_len as usize);
        let mut result = (cb.merge_fn)(key, None, operands);
        result.shrink_to_fit();
        // TODO(tan) investigate zero-copy techniques to improve performance

@@ -137,13 +129,12 @@ impl<'a> Iterator for &'a mut MergeOperands {
            let base_len = self.operands_list_len as usize;
            let spacing = mem::size_of::<*const *const u8>();
            let spacing_len = mem::size_of::<*const size_t>();
            let len_ptr = (base_len + (spacing_len * self.cursor)) as *const size_t;
            let len = *len_ptr as usize;
            let ptr = base + (spacing * self.cursor);
            self.cursor += 1;
            Some(mem::transmute(slice::from_raw_parts(*(ptr as *const *const u8) as *const u8,
                                                      len)))
        }
    }
}

@@ -156,9 +147,9 @@ impl<'a> Iterator for &'a mut MergeOperands {
#[cfg(test)]
mod test {
    use rocksdb::{DB, DBVector, Writable};
    use rocksdb_options::Options;
    use super::*;
    use tempdir::TempDir;

    #[allow(unused_variables)]
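The Iterator impl above walks two parallel C arrays, one of data pointers and one of lengths, and rebuilds a byte slice per operand. A self-contained sketch of that parallel-array walk with hypothetical names (RawOperands is not a crate type), copying each operand out rather than transmuting lifetimes:

use std::slice;

// Stand-in for the (pointer array, length array, count) triple that RocksDB
// hands to the full/partial merge callbacks.
struct RawOperands {
    ptrs: *const *const u8,
    lens: *const usize,
    num: usize,
    cursor: usize,
}

impl Iterator for RawOperands {
    type Item = Vec<u8>;

    fn next(&mut self) -> Option<Vec<u8>> {
        if self.cursor >= self.num {
            return None;
        }
        unsafe {
            // Index both parallel arrays at the same cursor position.
            let ptr = *self.ptrs.add(self.cursor);
            let len = *self.lens.add(self.cursor);
            self.cursor += 1;
            // Copy the operand out so the returned value owns its bytes.
            Some(slice::from_raw_parts(ptr, len).to_vec())
        }
    }
}

fn main() {
    let a = b"ab".to_vec();
    let b = b"cde".to_vec();
    let ptrs = [a.as_ptr(), b.as_ptr()];
    let lens = [a.len(), b.len()];
    let ops = RawOperands { ptrs: ptrs.as_ptr(), lens: lens.as_ptr(), num: 2, cursor: 0 };
    let joined: Vec<u8> = ops.flatten().collect();
    assert_eq!(joined, b"abcde".to_vec());
}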
src/rocksdb.rs (+0 -0): diff collapsed.

src/rocksdb_options.rs (+0 -0): diff collapsed.
test/test_column_family.rs

@@ -12,6 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
//
use rocksdb::{DB, MergeOperands, Options, Writable};
use tempdir::TempDir;

@@ -114,11 +115,7 @@ pub fn test_column_family() {
    }
    // should be able to drop a cf
    {
        let mut db = DB::open_cf(&Options::new(), path_str, &["cf1"], &[&Options::new()]).unwrap();
        match db.drop_cf("cf1") {
            Ok(_) => println!("cf1 successfully dropped."),
            Err(e) => panic!("failed to drop column family: {}", e),
test/test_compact_range.rs

use rocksdb::{DB, Options, Range, Writable};
use tempdir::TempDir;

#[test]
fn test_compact_range() {
    let path = TempDir::new("_rust_rocksdb_test_compact_range").expect("");
    let mut opts = Options::new();
    opts.create_if_missing(true);
    let db = DB::open(&opts, path.path().to_str().unwrap()).unwrap();
    let samples = vec![(b"k1".to_vec(), b"value--------1".to_vec()),
                       (b"k2".to_vec(), b"value--------2".to_vec()),
                       (b"k3".to_vec(), b"value--------3".to_vec()),
                       (b"k4".to_vec(), b"value--------4".to_vec()),
                       (b"k5".to_vec(), b"value--------5".to_vec())];
    for &(ref k, ref v) in &samples {
        db.put(k, v).unwrap();
        assert_eq!(v.as_slice(), &*db.get(k).unwrap().unwrap());
    }

    // flush memtable to sst file
    db.flush(true).unwrap();
    let old_size = db.get_approximate_sizes(&[Range::new(b"k0", b"k6")])[0];

    // delete all and compact whole range
    for &(ref k, _) in &samples {
        db.delete(k).unwrap()
    }
    db.compact_range(None, None);
    let new_size = db.get_approximate_sizes(&[Range::new(b"k0", b"k6")])[0];
    assert!(old_size > new_size);
}
test/test_compaction_filter.rs

use rocksdb::{Writable, DB, CompactionFilter, Options};
use std::sync::{Arc, RwLock};
use std::sync::atomic::{AtomicBool, Ordering};
use tempdir::TempDir;

struct Filter {
    drop_called: Arc<AtomicBool>,

@@ -29,10 +29,13 @@ fn test_compaction_filter() {
    let drop_called = Arc::new(AtomicBool::new(false));
    let filtered_kvs = Arc::new(RwLock::new(vec![]));
    // set ignore_snapshots to false
    opts.set_compaction_filter("test",
                               false,
                               Box::new(Filter {
                                   drop_called: drop_called.clone(),
                                   filtered_kvs: filtered_kvs.clone(),
                               }))
        .unwrap();
    opts.create_if_missing(true);
    let db = DB::open(&opts, path.path().to_str().unwrap()).unwrap();
    let samples = vec![

@@ -56,10 +59,13 @@ fn test_compaction_filter() {
    drop(db);
    // reregister with ignore_snapshots set to true
    opts.set_compaction_filter("test",
                               true,
                               Box::new(Filter {
                                   drop_called: drop_called.clone(),
                                   filtered_kvs: filtered_kvs.clone(),
                               }))
        .unwrap();
    assert!(drop_called.load(Ordering::Relaxed));
    drop_called.store(false, Ordering::Relaxed);
    {
test/test_iterator.rs

@@ -29,12 +29,11 @@ pub fn test_iterator() {
    assert!(p.is_ok());
    let p = db.put(k3, v3);
    assert!(p.is_ok());
    let expected = vec![(k1.to_vec(), v1.to_vec()),
                        (k2.to_vec(), v2.to_vec()),
                        (k3.to_vec(), v3.to_vec())];
    let mut iter = db.iter();
    iter.seek(SeekKey::Start);
    assert_eq!(iter.collect::<Vec<_>>(), expected);

@@ -75,9 +74,8 @@ pub fn test_iterator() {
    assert_eq!(iter.collect::<Vec<_>>(), expected2);
    iter.seek(SeekKey::Key(k2));
    let expected = vec![(k2.to_vec(), v2.to_vec()),
                        (k3.to_vec(), v3.to_vec()),
                        (k4.to_vec(), v4.to_vec())];
    assert_eq!(iter.collect::<Vec<_>>(), expected);
    iter.seek(SeekKey::Key(k2));
test/test_multithreaded.rs

use rocksdb::{DB, Writable};
use std::sync::Arc;
use std::thread;
use tempdir::TempDir;

const N: usize = 100_000;
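The reordered imports above belong to a test that shares one DB across threads. A hedged sketch of the Arc-based sharing pattern such a test relies on, built only from calls that appear elsewhere in this commit and assuming, as the test itself presupposes, that DB can be shared across threads:

extern crate rocksdb;
extern crate tempdir;

use rocksdb::{DB, Options, Writable};
use std::sync::Arc;
use std::thread;
use tempdir::TempDir;

const N: usize = 100_000;

fn main() {
    let dir = TempDir::new("_rust_rocksdb_multithread_sketch").expect("");
    let mut opts = Options::new();
    opts.create_if_missing(true);
    let db = Arc::new(DB::open(&opts, dir.path().to_str().unwrap()).unwrap());

    // Each writer thread gets its own Arc clone; the DB itself is shared.
    let writers: Vec<_> = (0..4)
        .map(|t| {
            let db = db.clone();
            thread::spawn(move || {
                for i in 0..N / 4 {
                    let key = format!("k_{}_{}", t, i);
                    db.put(key.as_bytes(), b"v").unwrap();
                }
            })
        })
        .collect();

    for w in writers {
        w.join().unwrap();
    }
}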
test/test_rocksdb_options.rs

use rocksdb::{DB, Options};
use tempdir::TempDir;

#[test]
fn test_set_num_levels() {
    let path = TempDir::new("_rust_rocksdb_test_set_num_levels").expect("");
    let mut opts = Options::new();
    opts.create_if_missing(true);
    opts.set_num_levels(2);
    let db = DB::open(&opts, path.path().to_str().unwrap()).unwrap();
    drop(db);
}