fangzongwu / rust-rocksdb / Commits

Commit a7fdf42e
Authored Nov 06, 2015 by Steve Klabnik

    Fix all warnings

Parent: a2334ab3

Showing 5 changed files with 48 additions and 54 deletions.
src/lib.rs                   +0  −3
src/main.rs                  +11 −12
test/test_column_family.rs   +10 −10
test/test_iterator.rs        +16 −18
test/test_multithreaded.rs   +11 −11
src/lib.rs

@@ -13,9 +13,6 @@
    See the License for the specific language governing permissions and
    limitations under the License.
 */
-#![crate_id = "rocksdb"]
-#![crate_type = "lib"]
-
 pub use ffi as rocksdb_ffi;
 pub use ffi::{new_bloom_filter, DBCompactionStyle, DBComparator};
 pub use rocksdb::{DB, DBResult, DBVector, WriteBatch, Writable, Direction};
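The src/lib.rs change only removes pre-1.0 crate attributes: the `crate_id` attribute had been retired from the language well before this commit, and a Cargo-managed crate gets its name and `--crate-type` from Cargo.toml rather than from in-source attributes, so the warning-free library root needs nothing beyond its re-exports. A minimal sketch of the resulting top of the file, with the module layout assumed from the re-exports:

    // src/lib.rs: no #![crate_id]/#![crate_type] attributes; Cargo supplies
    // the crate name and --crate-type for a [lib] target.
    pub mod ffi;      // assumed module, matching the re-exports below
    pub mod rocksdb;  // assumed module, matching the re-exports below

    pub use ffi as rocksdb_ffi;
    pub use ffi::{new_bloom_filter, DBCompactionStyle, DBComparator};
    pub use rocksdb::{DB, DBResult, DBVector, WriteBatch, Writable, Direction};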
src/main.rs

@@ -14,8 +14,7 @@
    limitations under the License.
 */
 extern crate rocksdb;
-use rocksdb::{Options, DB, MergeOperands, new_bloom_filter, Writable, };
+use rocksdb::{Options, DB, MergeOperands, Writable, };
-use rocksdb::DBCompactionStyle::DBUniversalCompaction;
 //fn snapshot_test() {
 //    let path = "_rust_rocksdb_iteratortest";
@@ -47,7 +46,7 @@ use rocksdb::DBCompactionStyle::DBUniversalCompaction;
 #[cfg(not(feature = "valgrind"))]
 fn main() {
     let path = "/tmp/rust-rocksdb";
-    let mut db = DB::open_default(path).unwrap();
+    let db = DB::open_default(path).unwrap();
     assert!(db.put(b"my key", b"my value").is_ok());
     db.get(b"my key").map(|value| {
         match value.to_utf8() {
@@ -65,8 +64,8 @@ fn main() {
     custom_merge();
 }
 
-fn concat_merge(new_key: &[u8], existing_val: Option<&[u8]>,
-                mut operands: &mut MergeOperands) -> Vec<u8> {
+fn concat_merge(_: &[u8], existing_val: Option<&[u8]>,
+                operands: &mut MergeOperands) -> Vec<u8> {
     let mut result: Vec<u8> = Vec::with_capacity(operands.size_hint().0);
     match existing_val {
         Some(v) => for e in v {
@@ -88,13 +87,13 @@ fn custom_merge() {
     opts.create_if_missing(true);
     opts.add_merge_operator("test operator", concat_merge);
     {
-        let mut db = DB::open(&opts, path).unwrap();
+        let db = DB::open(&opts, path).unwrap();
-        db.put(b"k1", b"a");
+        db.put(b"k1", b"a").unwrap();
-        db.merge(b"k1", b"b");
+        db.merge(b"k1", b"b").unwrap();
-        db.merge(b"k1", b"c");
+        db.merge(b"k1", b"c").unwrap();
-        db.merge(b"k1", b"d");
+        db.merge(b"k1", b"d").unwrap();
-        db.merge(b"k1", b"efg");
+        db.merge(b"k1", b"efg").unwrap();
-        db.merge(b"k1", b"h");
+        db.merge(b"k1", b"h").unwrap();
         db.get(b"k1").map(|value| {
             match value.to_utf8() {
                 Some(v) =>
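Two lints account for the whole src/main.rs diff: dropping the `Result` returned by `put`/`merge` trips `unused_must_use`, and the unread `new_key` parameter plus the needless `mut` on `operands` and `db` trip the unused-variable and unused-mut lints. A sketch of the same fixes reduced to plain std code (the `parse_port` helper is hypothetical, standing in for any `Result`-returning call such as `DB::put`):

    use std::num::ParseIntError;

    // Stand-in for a fallible API call that returns a Result.
    fn parse_port(s: &str) -> Result<u16, ParseIntError> {
        s.parse::<u16>()
    }

    fn main() {
        // parse_port("8080");          // warning: unused `Result` that must be used
        parse_port("8080").unwrap();    // the fix used throughout this commit
        let _ = parse_port("oops");     // alternative: discard the Result explicitly

        // A parameter the function never reads is renamed to `_`
        // (as done for `new_key`) to silence the unused-variable warning.
        let describe = |_: &[u8], value: &[u8]| value.len();
        assert_eq!(describe(b"ignored key", b"some value"), 10);
    }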
test/test_column_family.rs

@@ -13,7 +13,7 @@
    See the License for the specific language governing permissions and
    limitations under the License.
 */
-use rocksdb::{Options, DB, DBResult, Writable, Direction, MergeOperands};
+use rocksdb::{Options, DB, Writable, MergeOperands};
 
 #[test]
 pub fn test_column_family() {
@@ -58,7 +58,7 @@ pub fn test_column_family() {
     {
         let mut opts = Options::new();
         opts.add_merge_operator("test operator", test_provided_merge);
-        let mut db = match DB::open_cf(&opts, path, &["cf1"]) {
+        let db = match DB::open_cf(&opts, path, &["cf1"]) {
             Ok(db) => {
                 println!("successfully opened db with column family");
                 db
@@ -70,10 +70,10 @@ pub fn test_column_family() {
         assert!(db.get_cf(cf1, b"k1").unwrap().to_utf8().unwrap() == "v1");
         let p = db.put_cf(cf1, b"k1", b"a");
         assert!(p.is_ok());
-        db.merge_cf(cf1, b"k1", b"b");
+        db.merge_cf(cf1, b"k1", b"b").unwrap();
-        db.merge_cf(cf1, b"k1", b"c");
+        db.merge_cf(cf1, b"k1", b"c").unwrap();
-        db.merge_cf(cf1, b"k1", b"d");
+        db.merge_cf(cf1, b"k1", b"d").unwrap();
-        db.merge_cf(cf1, b"k1", b"efg");
+        db.merge_cf(cf1, b"k1", b"efg").unwrap();
         let m = db.merge_cf(cf1, b"k1", b"h");
         println!("m is {:?}", m);
         // TODO assert!(m.is_ok());
@@ -85,9 +85,9 @@ pub fn test_column_family() {
                 println!("did not read valid utf-8 out of the db"),
             }
         })
           .on_absent(|| { println!("value not present!") })
-          .on_error(|e| { println!("error reading value")}); //: {", e) });
+          .on_error(|_| { println!("error reading value")}); //: {", e) });
-        let r = db.get_cf(cf1, b"k1");
+        let _ = db.get_cf(cf1, b"k1");
         // TODO assert!(r.unwrap().to_utf8().unwrap() == "abcdefgh");
         assert!(db.delete(b"k1").is_ok());
         assert!(db.get(b"k1").is_none());
@@ -110,9 +110,9 @@ pub fn test_column_family() {
     assert!(DB::destroy(&Options::new(), path).is_ok());
 }
 
-fn test_provided_merge(new_key: &[u8],
+fn test_provided_merge(_: &[u8],
                        existing_val: Option<&[u8]>,
-                       mut operands: &mut MergeOperands)
+                       operands: &mut MergeOperands)
                        -> Vec<u8> {
     let nops = operands.size_hint().0;
     let mut result: Vec<u8> = Vec::with_capacity(nops);
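The column-family test gets the same treatment: `merge_cf` results are unwrapped, the unread `r` binding and the closure argument `e` become `_`, and `test_provided_merge` drops its unused key parameter. A small sketch of a concat-style merge callback with the unused pieces silenced; the signature here is simplified (a plain slice of operands instead of the crate's `MergeOperands` iterator):

    // Concat-style merge callback; the key is accepted but never read,
    // so it is bound to `_` instead of a named parameter.
    fn concat_values(_: &[u8], existing: Option<&[u8]>, operands: &[&[u8]]) -> Vec<u8> {
        let mut result: Vec<u8> = Vec::new();
        if let Some(v) = existing {
            result.extend_from_slice(v);
        }
        for op in operands {
            result.extend_from_slice(op);
        }
        result
    }

    fn main() {
        let merged = concat_values(b"k1", Some(b"a"), &[b"b", b"c"]);
        assert_eq!(merged, b"abc".to_vec());
    }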
test/test_iterator.rs

 use rocksdb::{Options, DB, Writable, Direction};
-use std;
 
 fn cba(input: &Box<[u8]>) -> Box<[u8]> {
     input.iter().cloned().collect::<Vec<_>>().into_boxed_slice()
@@ -17,7 +16,7 @@ pub fn test_iterator() {
     let v2: Box<[u8]> = b"v2222".to_vec().into_boxed_slice();
     let v3: Box<[u8]> = b"v3333".to_vec().into_boxed_slice();
     let v4: Box<[u8]> = b"v4444".to_vec().into_boxed_slice();
-    let mut db = DB::open_default(path).unwrap();
+    let db = DB::open_default(path).unwrap();
     let p = db.put(&*k1, &*v1);
     assert!(p.is_ok());
     let p = db.put(&*k2, &*v2);
@@ -27,83 +26,82 @@ pub fn test_iterator() {
     let mut view1 = db.iterator();
     let expected = vec![(cba(&k1), cba(&v1)),
                         (cba(&k2), cba(&v2)),
                         (cba(&k3), cba(&v3))];
     {
-        let mut iterator1 = view1.from_start();
+        let iterator1 = view1.from_start();
         assert_eq!(iterator1.collect::<Vec<_>>(), expected);
     }
     // Test that it's reusable a few times
     {
-        let mut iterator1 = view1.from_start();
+        let iterator1 = view1.from_start();
         assert_eq!(iterator1.collect::<Vec<_>>(), expected);
     }
     {
-        let mut iterator1 = view1.from_start();
+        let iterator1 = view1.from_start();
         assert_eq!(iterator1.collect::<Vec<_>>(), expected);
     }
     {
-        let mut iterator1 = view1.from_start();
+        let iterator1 = view1.from_start();
         assert_eq!(iterator1.collect::<Vec<_>>(), expected);
     }
     // Test it in reverse a few times
     {
-        let mut iterator1 = view1.from_end();
+        let iterator1 = view1.from_end();
         let mut tmp_vec = iterator1.collect::<Vec<_>>();
         tmp_vec.reverse();
         assert_eq!(tmp_vec, expected);
     }
     {
-        let mut iterator1 = view1.from_end();
+        let iterator1 = view1.from_end();
         let mut tmp_vec = iterator1.collect::<Vec<_>>();
         tmp_vec.reverse();
         assert_eq!(tmp_vec, expected);
     }
     {
-        let mut iterator1 = view1.from_end();
+        let iterator1 = view1.from_end();
         let mut tmp_vec = iterator1.collect::<Vec<_>>();
         tmp_vec.reverse();
         assert_eq!(tmp_vec, expected);
     }
     {
-        let mut iterator1 = view1.from_end();
+        let iterator1 = view1.from_end();
        let mut tmp_vec = iterator1.collect::<Vec<_>>();
         tmp_vec.reverse();
         assert_eq!(tmp_vec, expected);
     }
     {
-        let mut iterator1 = view1.from_end();
+        let iterator1 = view1.from_end();
         let mut tmp_vec = iterator1.collect::<Vec<_>>();
         tmp_vec.reverse();
         assert_eq!(tmp_vec, expected);
     }
     // Try it forward again
     {
-        let mut iterator1 = view1.from_start();
+        let iterator1 = view1.from_start();
         assert_eq!(iterator1.collect::<Vec<_>>(), expected);
     }
     {
-        let mut iterator1 = view1.from_start();
+        let iterator1 = view1.from_start();
         assert_eq!(iterator1.collect::<Vec<_>>(), expected);
     }
 
-    let mut view2 = db.iterator();
     let p = db.put(&*k4, &*v4);
     assert!(p.is_ok());
 
     let mut view3 = db.iterator();
     let expected2 = vec![(cba(&k1), cba(&v1)),
                         (cba(&k2), cba(&v2)),
                         (cba(&k3), cba(&v3)),
                         (cba(&k4), cba(&v4))];
     {
-        let mut iterator1 = view1.from_start();
+        let iterator1 = view1.from_start();
         assert_eq!(iterator1.collect::<Vec<_>>(), expected);
     }
     {
-        let mut iterator1 = view3.from_start();
+        let iterator1 = view3.from_start();
         assert_eq!(iterator1.collect::<Vec<_>>(), expected2);
     }
     {
-        let mut iterator1 = view3.from(b"k2", Direction::forward);
+        let iterator1 = view3.from(b"k2", Direction::forward);
         let expected = vec![(cba(&k2), cba(&v2)),
                             (cba(&k3), cba(&v3)),
                             (cba(&k4), cba(&v4))];
         assert_eq!(iterator1.collect::<Vec<_>>(), expected);
     }
     {
-        let mut iterator1 = view3.from(b"k2", Direction::reverse);
+        let iterator1 = view3.from(b"k2", Direction::reverse);
         let expected = vec![(cba(&k2), cba(&v2)),
                             (cba(&k1), cba(&v1))];
         assert_eq!(iterator1.collect::<Vec<_>>(), expected);
     }
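Every change in the iterator test is an unused import (`use std;`), an unused binding (`view2`), or a `mut` that rustc reports as unnecessary because the iterator is consumed by value rather than mutated through its binding. A reduced sketch of the unused-mut case using a plain std iterator:

    fn main() {
        let keys = vec![String::from("k1"), String::from("k2"), String::from("k3")];

        // Writing `let mut iterator = ...` here draws
        // `warning: variable does not need to be mutable`:
        // `collect` takes the iterator by value, so nothing ever
        // mutates it through the binding.
        let iterator = keys.iter().rev();
        let reversed: Vec<&String> = iterator.collect();
        assert_eq!(reversed, vec![&keys[2], &keys[1], &keys[0]]);
    }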
test/test_multithreaded.rs

-use rocksdb::{Options, DB, Writable, Direction, DBResult};
+use rocksdb::{Options, DB, Writable, DBResult};
-use std::thread::{self, Builder};
+use std::thread;
 use std::sync::Arc;
 
 const N: usize = 100_000;
@@ -11,25 +11,25 @@ pub fn test_multithreaded() {
         let db = DB::open_default(path).unwrap();
         let db = Arc::new(db);
 
-        db.put(b"key", b"value1");
+        db.put(b"key", b"value1").unwrap();
 
         let db1 = db.clone();
         let j1 = thread::spawn(move || {
-            for i in 1..N {
+            for _ in 1..N {
-                db1.put(b"key", b"value1");
+                db1.put(b"key", b"value1").unwrap();
             }
         });
 
         let db2 = db.clone();
         let j2 = thread::spawn(move || {
-            for i in 1..N {
+            for _ in 1..N {
-                db2.put(b"key", b"value2");
+                db2.put(b"key", b"value2").unwrap();
             }
         });
 
         let db3 = db.clone();
         let j3 = thread::spawn(move || {
-            for i in 1..N {
+            for _ in 1..N {
                 match db3.get(b"key") {
                     DBResult::Some(v) => {
                         if &v[..] != b"value1" && &v[..] != b"value2" {
@@ -43,9 +43,9 @@ pub fn test_multithreaded() {
             }
         });
 
-        j1.join();
+        j1.join().unwrap();
-        j2.join();
+        j2.join().unwrap();
-        j3.join();
+        j3.join().unwrap();
     }
     assert!(DB::destroy(&Options::new(), path).is_ok());
 }
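The multithreaded test collects the remaining fixes: the unused loop counter becomes `for _ in 1..N`, dropped `Result`s from `put` are unwrapped, and the `Result` returned by `JoinHandle::join` is unwrapped instead of silently discarded. A std-only sketch of the same shape:

    use std::sync::{Arc, Mutex};
    use std::thread;

    fn main() {
        let counter = Arc::new(Mutex::new(0u32));

        let mut handles = Vec::new();
        for _ in 0..4 {                 // `for i in 0..4` would warn: unused variable `i`
            let counter = Arc::clone(&counter);
            handles.push(thread::spawn(move || {
                *counter.lock().unwrap() += 1;
            }));
        }

        for handle in handles {
            // `join` returns a Result; dropping it trips unused_must_use,
            // so unwrap it (this also propagates a worker panic).
            handle.join().unwrap();
        }

        assert_eq!(*counter.lock().unwrap(), 4);
    }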